diff --git a/.gitignore b/.gitignore index e10f8c02a..536a12c3f 100644 --- a/.gitignore +++ b/.gitignore @@ -37,3 +37,5 @@ newrelic_agent.log # Claude local settings .claude/settings.local.json .aider* + +etc/scripts/ce-properties-wizard/ce_properties_wizard/__pycache__ diff --git a/README.md b/README.md index 1e5792a5f..a6eae1523 100644 --- a/README.md +++ b/README.md @@ -110,6 +110,11 @@ If you want to point it at your own GCC or similar binaries, either edit the `et else make a new one with the name `LANG.local.properties`, substituting `LANG` as needed. `*.local.properties` files have the highest priority when loading properties. +For a quick and easy way to add local compilers, use the +[CE Properties Wizard](etc/scripts/ce-properties-wizard/) which automatically detects and configures compilers +for [30+ languages](etc/scripts/ce-properties-wizard/README.md#supported-languages). +See [Adding a Compiler](docs/AddingACompiler.md) for more details. + If you want to support multiple compilers and languages like [godbolt.org](https://godbolt.org), you can use the `bin/ce_install install compilers` command in the [infra](https://github.com/compiler-explorer/infra) project to install all or some of the compilers. Compilers installed in this way can be loaded through the configuration in diff --git a/docs/AddingACompiler.md b/docs/AddingACompiler.md index ef1cac2fa..2db24ee9b 100644 --- a/docs/AddingACompiler.md +++ b/docs/AddingACompiler.md @@ -3,7 +3,54 @@ This document explains how to add a new compiler to Compiler Explorer ("CE" from here on), first for a local instance, and then how to submit PRs to get it into the main CE site. -## Configuration +## Quick method: Using ce-properties-wizard + +The easiest way to add a compiler to your local Compiler Explorer instance is to use the `ce-properties-wizard` tool. This interactive command-line tool automatically detects compiler information and updates your configuration files. 
+ +### Basic usage + +From the Compiler Explorer root directory: + +```bash +# Interactive mode - guides you through the process +etc/scripts/ce-properties-wizard/run.sh + +# Path-first mode - provide compiler path directly +etc/scripts/ce-properties-wizard/run.sh /usr/bin/g++-13 + +# Fully automated mode - accepts all defaults +etc/scripts/ce-properties-wizard/run.sh /usr/bin/g++-13 --yes +``` + +### Examples + +Add a custom GCC installation: +```bash +etc/scripts/ce-properties-wizard/run.sh /opt/gcc-14.2.0/bin/g++ +``` + +Add a cross-compiler: +```bash +etc/scripts/ce-properties-wizard/run.sh /usr/bin/arm-linux-gnueabihf-g++ \ + --name "ARM GCC 11.2" \ + --group arm-gcc \ + --yes +``` + +The wizard will: +- Automatically detect the compiler type, version, and language +- Generate appropriate compiler IDs and display names +- Add the compiler to the correct properties file +- Suggest appropriate groups for organization +- Validate the configuration with `propscheck.py` + +For more options and examples, see the [ce-properties-wizard README](../etc/scripts/ce-properties-wizard/README.md). + +## Manual configuration + +If you need more control or want to understand how the configuration works, read on for the manual approach. + +### Configuration Compiler configuration is done through the `etc/config/c++.*.properties` files (for C++, other languages follow the obvious pattern, replace as needed for your case). @@ -84,9 +131,9 @@ forward if that group is redefined in a higher-priority configuration file (e.g. The `compilerType` option is special: it refers to the Javascript class in `lib/compilers/*.ts` which handles running and handling output for this compiler type. -## Adding a new compiler locally +## Adding a new compiler manually -It should be pretty straightforward to add a compiler of your own. Create a `etc/config/c++.local.properties` file and +If the wizard doesn't work for your use case or you need fine-grained control, you can manually add a compiler. 
Create a `etc/config/c++.local.properties` file and override the `compilers` list to include your own compiler, and its configuration. Once you've done that, running `make` should pick up the configuration and during startup you should see your compiler diff --git a/etc/scripts/ce-properties-wizard/README.md b/etc/scripts/ce-properties-wizard/README.md new file mode 100644 index 000000000..ef1322b58 --- /dev/null +++ b/etc/scripts/ce-properties-wizard/README.md @@ -0,0 +1,283 @@ +# CE Properties Wizard + +An interactive command-line tool for adding custom compilers to your local Compiler Explorer installation. + +## Features + +- **Automatic Detection**: Detects compiler type and language from the executable path +- **Auto-Discovery**: Automatically finds and adds all compilers in your PATH +- **Interactive Mode**: Guided prompts for configuration +- **Automation Support**: Command-line flags for scripting +- **Group Management**: Automatically adds compilers to appropriate groups +- **Validation**: Validates generated properties with `propscheck.py` +- **Safe Updates**: Only adds/updates, never removes existing configurations + +## Requirements + +The wizard requires Python 3.10+ and Poetry. The run scripts handle all setup automatically. 
+ +## Usage + +### Interactive Mode + +Run without arguments for a fully interactive experience: + +**Linux/macOS:** +```bash +./run.sh +``` + +**Windows:** +```powershell +.\run.ps1 +``` + +### Path-First Mode + +Provide a compiler path to skip the first prompt: + +**Linux/macOS:** +```bash +./run.sh /usr/local/bin/g++-13 +``` + +**Windows:** +```powershell +.\run.ps1 "C:\MinGW\bin\g++.exe" +``` + +### Automated Mode + +Use command-line flags to automate the process: + +**Linux/macOS:** +```bash +./run.sh /usr/local/bin/g++-13 --yes +``` + +**Windows:** +```powershell +.\run.ps1 "C:\MinGW\bin\g++.exe" --yes +``` + +### Full Automation Example + +**Linux/macOS:** +```bash +./run.sh /path/to/compiler \ + --id custom-gcc-13 \ + --name "GCC 13.2.0" \ + --group gcc \ + --options "-std=c++20" \ + --language c++ \ + --yes +``` + +**Windows:** +```powershell +.\run.ps1 "C:\path\to\compiler.exe" ` + --id custom-gcc-13 ` + --name "GCC 13.2.0" ` + --group gcc ` + --options "-std=c++20" ` + --language c++ ` + --yes +``` + +### Auto-Discovery + +Automatically discover and add all compilers in your PATH: + +```bash +./auto_discover_compilers.py --dry-run # Preview what would be found +./auto_discover_compilers.py --languages c++,rust # Add only C++ and Rust compilers +./auto_discover_compilers.py --yes # Add all found compilers automatically +``` + +### Batch Processing + +Add multiple compilers with a simple loop: + +**Linux/macOS:** +```bash +for compiler in /opt/compilers/*/bin/*; do + ./run.sh "$compiler" --yes +done +``` + +**Windows:** +```powershell +Get-ChildItem "C:\Compilers\*\bin\*.exe" | ForEach-Object { + .\run.ps1 $_.FullName --yes +} +``` + +## Command-Line Options + +- `COMPILER_PATH`: Path to the compiler executable (optional in interactive mode) +- `--id`: Compiler ID (auto-generated if not specified) +- `--name`: Display name for the compiler +- `--group`: Compiler group to add to (e.g., gcc, clang) +- `--options`: Default compiler options +- `--language`: 
Programming language (auto-detected if not specified) +- `--yes, -y`: Skip confirmation prompts +- `--non-interactive`: Run in non-interactive mode with auto-detected values +- `--config-dir`: Path to etc/config directory (auto-detected if not specified) +- `--verify-only`: Only detect and display compiler information without making changes +- `--list-types`: List all supported compiler types and exit +- `--reorganize LANGUAGE`: Reorganize an existing properties file for the specified language +- `--validate-discovery`: Run discovery validation to verify the compiler is detected (default for local environment) +- `--env ENV`: Environment to target (local, amazon, etc.) - defaults to 'local' + +## Supported Languages + +The wizard currently supports: + +**Systems Languages:** +- C++, C, CUDA +- Rust, Zig, V, Odin +- Carbon, Mojo + +**Popular Compiled Languages:** +- D (DMD, LDC, GDC) +- Swift, Nim, Crystal +- Go, Kotlin, Java + +**Functional Languages:** +- Haskell (GHC) +- OCaml, Scala + +**.NET Languages:** +- C#, F# + +**Scripting/Dynamic Languages:** +- Python, Ruby, Julia +- Dart, Elixir, Erlang + +**Other Languages:** +- Fortran, Pascal, Ada +- COBOL, Assembly (NASM, GAS, YASM) + +## Compiler Detection + +The wizard attempts to detect compiler type by running version commands: +- GCC: `--version` +- Clang: `--version` +- Intel: `--version` +- MSVC: `/help` +- NVCC: `--version` +- Rust: `--version` +- Go: `version` +- Python: `--version` + +If detection fails, you can manually specify the compiler type. + +## Configuration Files + +The wizard modifies `.local.properties` files in `etc/config/`. 
It: +- Preserves existing content and formatting +- Creates backup files before modification +- Adds compilers to groups by default +- Ensures unique compiler IDs + +## Examples + +### Add a custom GCC installation + +**Linux/macOS:** +```bash +./run.sh /opt/gcc-13.2.0/bin/g++ +``` + +**Windows:** +```powershell +.\run.ps1 "C:\TDM-GCC-64\bin\g++.exe" +``` + +### Add a cross-compiler + +**Linux/macOS:** +```bash +./run.sh /usr/bin/arm-linux-gnueabihf-g++ \ + --name "ARM GCC 11.2" \ + --group arm-gcc \ + --yes +``` + +**Windows:** +```powershell +.\run.ps1 "C:\arm-toolchain\bin\arm-none-eabi-g++.exe" ` + --name "ARM GCC 11.2" ` + --group arm-gcc ` + --yes +``` + +### Add a Python interpreter + +**Linux/macOS:** +```bash +./run.sh /usr/local/bin/python3.12 --yes +``` + +**Windows:** +```powershell +.\run.ps1 "C:\Python312\python.exe" --yes +``` + +### Verify compiler detection only + +**Linux/macOS:** +```bash +./run.sh /usr/bin/g++-13 --verify-only +``` + +**Windows:** +```powershell +.\run.ps1 "C:\MinGW\bin\g++.exe" --verify-only +``` + +### List all supported compiler types + +**Linux/macOS:** +```bash +./run.sh --list-types +``` + +**Windows:** +```powershell +.\run.ps1 --list-types +``` + +This will output something like: +``` +Detected compiler information: + Path: /usr/bin/g++-13 + Language: C++ + Compiler Type: gcc + Version: 13.2.0 + Semver: 13.2.0 + Suggested ID: custom-gcc-13-2-0 + Suggested Name: GCC 13.2.0 + Suggested Group: gcc +``` + +## Troubleshooting + +### Compiler not detected +If the wizard can't detect your compiler type, it will prompt you to select one manually. + +### Permission errors +Ensure you have write permissions to the `etc/config` directory. + +### Validation failures +If `propscheck.py` reports errors, check the generated properties file for syntax issues. + +## Development + +To contribute to the wizard: + +1. Format code: `./run.sh --format` +2. Check formatting: `./run.sh --format --check` +3. 
Run tests: `poetry run pytest` (after `poetry install`) + +The `--format` flag runs black, ruff, and pytype formatters on the codebase. \ No newline at end of file diff --git a/etc/scripts/ce-properties-wizard/auto_discover_compilers.py b/etc/scripts/ce-properties-wizard/auto_discover_compilers.py new file mode 100755 index 000000000..8f599c568 --- /dev/null +++ b/etc/scripts/ce-properties-wizard/auto_discover_compilers.py @@ -0,0 +1,284 @@ +#!/usr/bin/env python3 +""" +CE Compiler Auto-Discovery Tool + +Automatically discovers compilers in PATH directories and adds them using +the CE Properties Wizard. +""" + +import argparse +import os +import subprocess +import sys +from pathlib import Path +from typing import Dict, List, Set + + +# Compiler patterns for each language +COMPILER_PATTERNS = { + 'c++': ['g++', 'g++-*', 'clang++', 'clang++-*', 'icpc*', 'icx*'], + 'c': ['gcc', 'gcc-[0-9]*', 'clang', 'clang-[0-9]*', 'icc*', 'cc'], + 'cuda': ['nvcc*'], + 'rust': ['rustc*'], + 'go': ['go', 'gccgo*'], + 'python': ['python*', 'python3*', 'pypy*'], + 'java': ['javac*', 'java'], + 'fortran': ['gfortran*', 'ifort*', 'ifx*'], + 'pascal': ['fpc'], + 'kotlin': ['kotlin*', 'kotlinc*'], + 'zig': ['zig'], + 'dart': ['dart'], + 'd': ['dmd*', 'ldc*', 'ldc2*', 'gdc*'], + 'swift': ['swift*', 'swiftc*'], + 'nim': ['nim'], + 'crystal': ['crystal'], + 'v': ['v'], + 'haskell': ['ghc*'], + 'ocaml': ['ocaml*'], + 'scala': ['scala*', 'scalac*'], + 'csharp': ['csc*', 'mcs*', 'dotnet'], + 'fsharp': ['fsharpc*', 'dotnet'], + 'ruby': ['ruby*'], + 'julia': ['julia'], + 'elixir': ['elixir*'], + 'erlang': ['erlc*', 'erl'], + 'assembly': ['nasm*', 'yasm*', 'as'], + 'carbon': ['carbon*'], + 'mojo': ['mojo*'], + 'odin': ['odin*'], + 'ada': ['gnatmake*', 'gprbuild*', 'gnat*'], + 'cobol': ['cobc*', 'gnucobol*', 'gcobol*'], +} + +# Default exclude patterns +DEFAULT_EXCLUDES = { + 'wrapper', 'distcc', 'ccache', '-config', 'config-', + '-ar', '-nm', '-ranlib', '-strip', 'filt', 'format', + 'calls', 'flow', 
def get_path_dirs() -> List[Path]:
    """Return all directories listed in the PATH environment variable.

    Splits on ``os.pathsep`` so the result is correct on both POSIX (``:``)
    and Windows (``;``) — the tool explicitly supports Windows via run.ps1,
    and a hard-coded ``':'`` would tear Windows paths apart at the drive
    colon (``C:\\tools`` -> ``C``, ``\\tools``). Blank entries are skipped.
    """
    raw = os.environ.get('PATH', '')
    return [Path(entry) for entry in raw.split(os.pathsep) if entry.strip()]


def should_exclude(name: str, excludes: Set[str]) -> bool:
    """Return True if any exclude pattern occurs as a substring of *name*."""
    return any(pattern in name for pattern in excludes)


def find_compilers_in_dir(directory: Path, patterns: List[str], excludes: Set[str]) -> List[Path]:
    """Find executable compiler candidates in *directory* matching glob *patterns*.

    Returns an empty list when the directory does not exist or is not a
    directory. A candidate must be a regular file or a symlink, be
    executable by the current user, and not hit any exclude pattern.
    """
    compilers: List[Path] = []

    if not directory.exists() or not directory.is_dir():
        return compilers

    for pattern in patterns:
        # Simple glob matching against the directory contents.
        for candidate in directory.glob(pattern):
            if (candidate.is_file() or candidate.is_symlink()) and \
                    os.access(candidate, os.X_OK) and \
                    not should_exclude(candidate.name, excludes):
                compilers.append(candidate)

    return compilers


def resolve_duplicates(compilers: List[Path]) -> List[Path]:
    """Drop entries whose resolved (symlink-followed) path was already seen.

    First-occurrence order is preserved. Paths that cannot be resolved
    (e.g. symlink loops) are kept as-is rather than dropped, so a broken
    entry is still surfaced to the user.
    """
    seen: Set[Path] = set()
    unique_compilers: List[Path] = []

    for compiler in compilers:
        try:
            resolved = compiler.resolve()
        except (OSError, RuntimeError):
            # If we can't resolve, keep the original (best effort).
            unique_compilers.append(compiler)
            continue
        if resolved not in seen:
            seen.add(resolved)
            unique_compilers.append(compiler)

    return unique_compilers
def discover_compilers(languages: List[str], search_dirs: List[Path] = None,
                       excludes: Set[str] = None) -> Dict[str, List[Path]]:
    """Find compilers for the given languages, grouped by language.

    Args:
        languages: Language keys from COMPILER_PATTERNS; unknown keys are
            reported on stderr and skipped.
        search_dirs: Directories to scan; defaults to the PATH directories.
        excludes: Substring patterns to reject; defaults to DEFAULT_EXCLUDES.

    Returns:
        Mapping of language -> sorted, de-duplicated compiler paths.
        Languages with no hits are omitted from the result.
    """
    if search_dirs is None:
        search_dirs = get_path_dirs()

    if excludes is None:
        excludes = DEFAULT_EXCLUDES

    discovered: Dict[str, List[Path]] = {}

    for language in languages:
        if language not in COMPILER_PATTERNS:
            print(f"Warning: Unknown language '{language}'", file=sys.stderr)
            continue

        patterns = COMPILER_PATTERNS[language]
        compilers: List[Path] = []

        for directory in search_dirs:
            compilers.extend(find_compilers_in_dir(directory, patterns, excludes))

        if compilers:
            # De-duplicate symlink aliases, then sort by name for stable output.
            unique_compilers = resolve_duplicates(compilers)
            discovered[language] = sorted(unique_compilers, key=lambda x: x.name)

    return discovered


def add_compiler_with_wizard(compiler: Path, language: str, script_dir: Path,
                             wizard_args: List[str], dry_run: bool) -> bool:
    """Register one compiler through the CE Properties Wizard.

    Returns True on success (or immediately in dry-run mode), False when the
    wizard exits non-zero or cannot be launched at all.
    """
    if dry_run:
        return True

    cmd = [
        str(script_dir / 'run.sh'),
        str(compiler),
        '--yes',
        '--language', language
    ] + wizard_args

    try:
        # argv list (shell=False) so compiler paths containing spaces are safe.
        result = subprocess.run(cmd, capture_output=True, text=True)
        return result.returncode == 0
    except Exception:
        # Best-effort: a wizard that can't even start counts as a failure.
        return False


def main():
    """CLI entry point: discover compilers on PATH and add them via the wizard."""
    parser = argparse.ArgumentParser(
        description='CE Compiler Auto-Discovery Tool',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s                          # Interactive discovery of all languages
  %(prog)s --dry-run                # Preview what would be discovered
  %(prog)s --languages c++,rust,go  # Only discover C++, Rust, and Go
  %(prog)s --yes --languages c++,c  # Non-interactive C/C++ discovery
        """)

    parser.add_argument('--languages',
                        help='Comma-separated list of languages to discover (default: all)')
    parser.add_argument('--search-dirs',
                        help='Search directories separated by os.pathsep (default: PATH dirs)')
    parser.add_argument('--exclude',
                        help='Comma-separated exclude patterns')
    parser.add_argument('--dry-run', action='store_true',
                        help='Show what would be added without making changes')
    parser.add_argument('--yes', '-y', action='store_true',
                        help='Skip confirmation prompts')
    parser.add_argument('--config-dir',
                        help='Path to etc/config directory')
    parser.add_argument('--env', default='local',
                        help='Environment to target (local, amazon, etc.)')

    args = parser.parse_args()

    # The wizard entry point lives next to this script.
    script_dir = Path(__file__).parent
    wizard_script = script_dir / 'run.sh'

    if not wizard_script.exists():
        print(f"Error: CE Properties Wizard not found at {wizard_script}", file=sys.stderr)
        sys.exit(1)

    # Parse languages
    if args.languages:
        languages = [lang.strip() for lang in args.languages.split(',')]
    else:
        languages = list(COMPILER_PATTERNS.keys())

    # BUG FIX: split on os.pathsep instead of a hard-coded ':' so Windows
    # paths like "C:\tools" are not torn apart at the drive-letter colon.
    search_dirs = None
    if args.search_dirs:
        search_dirs = [Path(d) for d in args.search_dirs.split(os.pathsep) if d.strip()]

    # BUG FIX: strip whitespace around user-supplied exclude patterns, matching
    # how --languages is parsed (" foo" would otherwise never match anything).
    excludes = DEFAULT_EXCLUDES.copy()
    if args.exclude:
        excludes.update(p.strip() for p in args.exclude.split(',') if p.strip())

    # Discover compilers
    print("CE Compiler Auto-Discovery Tool")
    print("=" * 35)
    print()

    if args.dry_run:
        print("DRY RUN MODE - No compilers will actually be added")
        print()

    discovered = discover_compilers(languages, search_dirs, excludes)

    if not discovered:
        print("No compilers found matching the specified criteria")
        sys.exit(1)

    # Show results
    total_count = sum(len(compilers) for compilers in discovered.values())
    print(f"Found {total_count} compilers:")
    print()

    for language, compilers in discovered.items():
        print(f"{language.upper()} ({len(compilers)} compilers):")
        for compiler in compilers:
            print(f"  ✓ {compiler}")
        print()

    # Confirm before adding
    if not args.dry_run and not args.yes:
        response = input("Add these compilers? [y/N] ")
        if not response.lower().startswith('y'):
            print("Operation cancelled")
            sys.exit(0)

    if args.dry_run:
        print("Dry run complete - no changes made")
        sys.exit(0)

    # Add compilers
    print("Adding compilers using CE Properties Wizard...")
    print()

    wizard_args = []
    if args.config_dir:
        wizard_args.extend(['--config-dir', args.config_dir])
    if args.env != 'local':
        wizard_args.extend(['--env', args.env])

    added_count = 0
    failed_count = 0

    for language, compilers in discovered.items():
        for compiler in compilers:
            print(f"Adding {compiler} ({language})...", end=' ')

            if add_compiler_with_wizard(compiler, language, script_dir, wizard_args, args.dry_run):
                print("✓")
                added_count += 1
            else:
                print("✗")
                failed_count += 1

    print()
    print("Summary:")
    print(f"  ✓ Successfully added: {added_count} compilers")
    if failed_count > 0:
        print(f"  ✗ Failed to add: {failed_count} compilers")
    print()
    print("Auto-discovery complete!")


if __name__ == '__main__':
    main()
def get_supported_compiler_types() -> Set[str]:
    """Collect every compiler ``key`` declared in CE's lib/compilers/*.ts files.

    Scans the TypeScript compiler classes for ``static get key()`` accessors
    and returns the set of key strings found. When the CE lib directory
    cannot be located, falls back to a hard-coded minimal set so callers can
    still operate.
    """
    try:
        lib_compilers_dir = find_ce_lib_directory() / "compilers"
    except FileNotFoundError:
        # Minimal known-good fallback when the checkout layout is unexpected.
        return {
            "gcc",
            "clang",
            "icc",
            "icx",
            "ifx",
            "ifort",
            "nvcc",
            "rustc",
            "golang",
            "python",
            "java",
            "fpc",
            "z88dk",
            "tinygo",
            "other",
        }

    # Accept single-line and multi-line accessor styles, with or without
    # `override` or an explicit `: string` return annotation.
    key_patterns = [
        r'static\s+get\s+key\(\)\s*{\s*return\s+[\'"]([^\'"]+)[\'"]',
        r'static\s+override\s+get\s+key\(\)\s*{\s*return\s+[\'"]([^\'"]+)[\'"]',
        r'static\s+get\s+key\(\)\s*:\s*string\s*{\s*return\s+[\'"]([^\'"]+)[\'"]',
    ]

    compiler_types: Set[str] = set()
    for ts_file in lib_compilers_dir.glob("*.ts"):
        try:
            source = ts_file.read_text(encoding="utf-8")
        except (IOError, UnicodeDecodeError):
            # Unreadable file: skip it rather than abort the whole scan.
            continue
        for pattern in key_patterns:
            for key in re.findall(pattern, source, re.MULTILINE | re.DOTALL):
                compiler_types.add(key.strip())

    return compiler_types
+ properties_file="cuda.local.properties", + compiler_types=["nvcc", "clang"], + extensions=[".cu", ".cuh"], + keywords=["nvcc", "cuda"], + ), + "rust": LanguageConfig( + name="Rust", + properties_file="rust.local.properties", + compiler_types=["rustc"], + extensions=[".rs"], + keywords=["rustc"], + ), + "go": LanguageConfig( + name="Go", + properties_file="go.local.properties", + compiler_types=["go", "gccgo"], + extensions=[".go"], + keywords=["go", "gccgo"], + ), + "python": LanguageConfig( + name="Python", + properties_file="python.local.properties", + compiler_types=["python", "pypy"], + extensions=[".py"], + keywords=["python", "pypy"], + ), + "java": LanguageConfig( + name="Java", + properties_file="java.local.properties", + compiler_types=["javac"], + extensions=[".java"], + keywords=["javac", "java"], + ), + "fortran": LanguageConfig( + name="Fortran", + properties_file="fortran.local.properties", + compiler_types=["gfortran", "ifort"], + extensions=[".f90", ".f95", ".f03", ".f08", ".f", ".for"], + keywords=["gfortran", "ifort", "fortran"], + ), + "pascal": LanguageConfig( + name="Pascal", + properties_file="pascal.local.properties", + compiler_types=["fpc", "delphi"], + extensions=[".pas", ".pp", ".p"], + keywords=["fpc", "pascal", "delphi"], + ), + "kotlin": LanguageConfig( + name="Kotlin", + properties_file="kotlin.local.properties", + compiler_types=["kotlin"], + extensions=[".kt", ".kts"], + keywords=["kotlin", "kotlinc"], + ), + "zig": LanguageConfig( + name="Zig", + properties_file="zig.local.properties", + compiler_types=["zig"], + extensions=[".zig"], + keywords=["zig"], + ), + "dart": LanguageConfig( + name="Dart", + properties_file="dart.local.properties", + compiler_types=["dart"], + extensions=[".dart"], + keywords=["dart"], + ), + # Popular compiled languages + "d": LanguageConfig( + name="D", + properties_file="d.local.properties", + compiler_types=["dmd", "ldc", "gdc"], + extensions=[".d"], + keywords=["dmd", "ldc", "gdc"], + ), + "swift": 
LanguageConfig( + name="Swift", + properties_file="swift.local.properties", + compiler_types=["swiftc"], + extensions=[".swift"], + keywords=["swift", "swiftc"], + ), + "nim": LanguageConfig( + name="Nim", + properties_file="nim.local.properties", + compiler_types=["nim"], + extensions=[".nim"], + keywords=["nim"], + ), + "crystal": LanguageConfig( + name="Crystal", + properties_file="crystal.local.properties", + compiler_types=["crystal"], + extensions=[".cr"], + keywords=["crystal"], + ), + "v": LanguageConfig( + name="V", + properties_file="v.local.properties", + compiler_types=["v"], + extensions=[".v"], + keywords=["v"], + ), + # Functional languages + "haskell": LanguageConfig( + name="Haskell", + properties_file="haskell.local.properties", + compiler_types=["ghc"], + extensions=[".hs", ".lhs"], + keywords=["ghc", "haskell"], + ), + "ocaml": LanguageConfig( + name="OCaml", + properties_file="ocaml.local.properties", + compiler_types=["ocamlc", "ocamlopt"], + extensions=[".ml", ".mli"], + keywords=["ocaml"], + ), + "scala": LanguageConfig( + name="Scala", + properties_file="scala.local.properties", + compiler_types=["scalac"], + extensions=[".scala"], + keywords=["scala", "scalac"], + ), + # JVM languages + "csharp": LanguageConfig( + name="C#", + properties_file="csharp.local.properties", + compiler_types=["csharp", "dotnet"], + extensions=[".cs"], + keywords=["csharp", "dotnet", "mcs", "csc"], + ), + "fsharp": LanguageConfig( + name="F#", + properties_file="fsharp.local.properties", + compiler_types=["fsharp", "dotnet"], + extensions=[".fs", ".fsi", ".fsx"], + keywords=["fsharp", "dotnet", "fsharpc"], + ), + # Scripting/Dynamic languages + "ruby": LanguageConfig( + name="Ruby", + properties_file="ruby.local.properties", + compiler_types=["ruby"], + extensions=[".rb"], + keywords=["ruby"], + ), + "julia": LanguageConfig( + name="Julia", + properties_file="julia.local.properties", + compiler_types=["julia"], + extensions=[".jl"], + keywords=["julia"], + ), + 
"elixir": LanguageConfig( + name="Elixir", + properties_file="elixir.local.properties", + compiler_types=["elixir"], + extensions=[".ex", ".exs"], + keywords=["elixir"], + ), + "erlang": LanguageConfig( + name="Erlang", + properties_file="erlang.local.properties", + compiler_types=["erlc"], + extensions=[".erl", ".hrl"], + keywords=["erlang", "erlc"], + ), + # Assembly and low-level + "assembly": LanguageConfig( + name="Assembly", + properties_file="assembly.local.properties", + compiler_types=["nasm", "gas", "as", "yasm"], + extensions=[".s", ".asm"], + keywords=["nasm", "gas", "as", "yasm", "asm"], + ), + # Modern systems languages + "carbon": LanguageConfig( + name="Carbon", + properties_file="carbon.local.properties", + compiler_types=["carbon"], + extensions=[".carbon"], + keywords=["carbon"], + ), + "mojo": LanguageConfig( + name="Mojo", + properties_file="mojo.local.properties", + compiler_types=["mojo"], + extensions=[".mojo", ".🔥"], + keywords=["mojo"], + ), + "odin": LanguageConfig( + name="Odin", + properties_file="odin.local.properties", + compiler_types=["odin"], + extensions=[".odin"], + keywords=["odin"], + ), + "ada": LanguageConfig( + name="Ada", + properties_file="ada.local.properties", + compiler_types=["gnatmake", "gprbuild"], + extensions=[".adb", ".ads"], + keywords=["ada", "gnat"], + ), + "cobol": LanguageConfig( + name="COBOL", + properties_file="cobol.local.properties", + compiler_types=["gnucobol", "gcobol"], + extensions=[".cob", ".cobol"], + keywords=["cobol", "gnucobol", "gcobol"], + ), +} + + +class CompilerDetector: + """Handles compiler detection and language inference.""" + + def __init__(self, debug: bool = False): + """Initialize the detector. 
+ + Args: + debug: Enable debug output for subprocess commands + """ + self.debug = debug + + def detect_from_path(self, compiler_path: str) -> CompilerInfo: + """Detect compiler information from executable path.""" + if not os.path.isfile(compiler_path): + raise ValueError(f"Compiler not found at: {compiler_path}") + + if not os.access(compiler_path, os.X_OK): + raise ValueError(f"File is not executable: {compiler_path}") + + compiler_name = os.path.basename(compiler_path) + + # Detect language + language = self._detect_language(compiler_path, compiler_name) + + # Detect compiler type and version + compiler_type, version = self._detect_compiler_type_and_version(compiler_path) + + # Detect target platform (for cross-compilers) + target = self._detect_target_platform(compiler_path, compiler_type) + is_cross = self._is_cross_compiler(target) + + # Generate ID based on whether it's a cross-compiler + compiler_id = self._generate_id(compiler_type, version, compiler_name, language, target if is_cross else None) + + # Generate display name + display_name = self._generate_display_name(compiler_type, version, compiler_name, target if is_cross else None) + + # Group will be suggested later by smart group suggestion logic + group = None + + # Detect Java-related properties for Java-based compilers + java_home, runtime = self._detect_java_properties(compiler_type, compiler_path) + + # Detect execution wrapper for specific compilers + execution_wrapper = self._detect_execution_wrapper(compiler_type, compiler_path) + + # Detect MSVC include and library paths + include_path, lib_path = self._detect_msvc_paths(compiler_type, compiler_path, language) + + # Check if this is an MSVC compiler that might need SDK prompting + # We need SDK prompting if it's MSVC but no Windows SDK paths were detected from existing compilers + needs_sdk_prompt = False + if compiler_type == "win32-vc": + # Quick check - do any existing compilers have Windows SDK paths? 
+ try: + from .utils import find_ce_config_directory + from .config_manager import ConfigManager + + config_dir = find_ce_config_directory() + temp_config = ConfigManager(config_dir, "local", debug=self.debug) + properties_path = temp_config.get_properties_path(language) + + if properties_path.exists(): + properties = temp_config.read_properties_file(properties_path) + has_sdk = False + + for key, value in properties.items(): + if key.endswith(".includePath") and isinstance(value, str): + if "/include/" in value and "/um" in value: + has_sdk = True + break + + needs_sdk_prompt = not has_sdk + else: + needs_sdk_prompt = True # No properties file means no SDK paths + + except Exception: + needs_sdk_prompt = True # If we can't check, prompt to be safe + + return CompilerInfo( + id=compiler_id, + name=display_name, + exe=compiler_path, + compiler_type=compiler_type, + version=version, + semver=self._extract_semver(version), + group=group, + language=language, + target=target, + is_cross_compiler=is_cross, + java_home=java_home, + runtime=runtime, + execution_wrapper=execution_wrapper, + include_path=include_path, + lib_path=lib_path, + needs_sdk_prompt=needs_sdk_prompt, + ) + + def _detect_language(self, compiler_path: str, compiler_name: str) -> str: + """Detect programming language from compiler path/name.""" + compiler_lower = compiler_name.lower() + path_lower = compiler_path.lower() + + # Check each language's keywords + for lang_key, config in LANGUAGE_CONFIGS.items(): + for keyword in config.keywords: + if keyword in compiler_lower or keyword in path_lower: + # Special case: differentiate between C and C++ + if lang_key == "c" and ("++" in compiler_lower or "plus" in compiler_lower): + return "c++" + return lang_key + + # Default to C++ if unclear + return "c++" + + def _detect_compiler_type_and_version(self, compiler_path: str) -> Tuple[Optional[str], Optional[str]]: + """Detect compiler type and version by running it.""" + compiler_name = 
os.path.basename(compiler_path).lower() + + # Special case for Go - use 'version' subcommand instead of flag + if compiler_name == "go" or compiler_name.endswith("/go"): + result = SubprocessRunner.run_with_timeout([compiler_path, "version"], timeout=5) + if result and "go version" in result.stdout.lower(): + version = VersionExtractor.extract_version("go", result.stdout) + return "go", version + + # Special case for Zig - use 'version' subcommand + if compiler_name == "zig" or compiler_name.endswith("/zig"): + result = SubprocessRunner.run_with_timeout([compiler_path, "version"], timeout=5) + if result and result.stdout.strip(): + # Zig version command just outputs the version number + version = result.stdout.strip() + if re.match(r"\d+\.\d+\.\d+", version): + return "zig", version + + # Special case for Kotlin - may need JAVA_HOME environment + if "kotlin" in compiler_name: + # Try to find a suitable JAVA_HOME if not set + original_java_home = os.environ.get("JAVA_HOME") + if not original_java_home: + # Try to infer JAVA_HOME from nearby JDK installations + compiler_dir = Path(compiler_path).parent.parent.parent + for potential_jdk in compiler_dir.glob("jdk-*"): + if potential_jdk.is_dir() and (potential_jdk / "bin" / "java").exists(): + os.environ["JAVA_HOME"] = str(potential_jdk) + break + + # Try version detection with potentially updated JAVA_HOME + for flag in ["-version", "--version"]: + result = SubprocessRunner.run_with_timeout([compiler_path, flag], timeout=10) + if result and ("kotlinc" in result.stderr.lower() or "kotlin" in result.stderr.lower()): + version = VersionExtractor.extract_version("kotlin", result.stderr) + return "kotlin", version + + # Restore original JAVA_HOME if we modified it + if not original_java_home and "JAVA_HOME" in os.environ: + del os.environ["JAVA_HOME"] + + # Try common version flags and subcommands + version_flags = ["--version", "-v", "--help", "-V", "/help", "/?", "version"] + + # Detect if compiler is on a network drive 
(common for shared compiler installations) + is_network_drive = compiler_path[1:2] == ":" and compiler_path[0].upper() >= "X" + + for flag in version_flags: + # Use longer timeout for --version on network drives (can take 15+ seconds) + if flag == "--version" and is_network_drive: + timeout_value = 20 + elif is_network_drive: + timeout_value = 10 + else: + timeout_value = 2 + + # Try with appropriate timeout + if self.debug: + print(f"Running: {compiler_path} {flag} (timeout: {timeout_value}s)") + + result = SubprocessRunner.run_with_timeout([compiler_path, flag], timeout=timeout_value) + if result is None: + if self.debug: + print(f" -> Command failed or timed out") + continue + + if self.debug: + print(f" -> Command succeeded, return code: {result.returncode}") + if result.stdout: + print(f" -> stdout: {result.stdout[:200]}") + if result.stderr: + print(f" -> stderr: {result.stderr[:200]}") + + + output = (result.stdout + result.stderr).lower() + full_output = result.stdout + result.stderr + + # Detect z88dk first (before Clang) since z88dk mentions clang in its help + if "z88dk" in output: + version = VersionExtractor.extract_version("z88dk", full_output) + return "z88dk", version + + # Detect Clang (before GCC) since clang output may contain 'gnu' + if "clang" in output: + version = VersionExtractor.extract_version("clang", full_output) + + # Check if this is MinGW Clang on Windows + if platform.system() == "Windows": + # Check for MinGW indicators + if ("mingw" in output or "windows-gnu" in output or + "mingw" in compiler_path.lower() or + any(indicator in compiler_path.lower() for indicator in ["mingw", "tdm-gcc", "winlibs"])): + return "win32-mingw-clang", version + + return "clang", version + + # Detect GCC (including MinGW on Windows) + if "gcc" in output or "g++" in output or ("gnu" in output and "clang" not in output): + version = VersionExtractor.extract_version("gcc", full_output) + + # Check if this is MinGW based on version output + if "mingw" in 
output: + return "win32-mingw-gcc", version + + return "gcc", version + + # Detect Intel Fortran first + if "ifx" in output or "ifort" in output: + version = VersionExtractor.extract_version("intel_fortran", full_output) + if "ifx" in output: + return "ifx", version + else: + return "ifort", version + + # Detect Intel C/C++ + if "intel" in output: + version = VersionExtractor.extract_version("intel", full_output) + if "icx" in output or "dpcpp" in output: + return "icx", version + else: + return "icc", version + + # Detect MSVC + if "microsoft" in output or "msvc" in output: + version = VersionExtractor.extract_version("msvc", full_output) + return "win32-vc", version + + # Detect NVCC + if "nvidia" in output or "nvcc" in output: + version = VersionExtractor.extract_version("nvcc", full_output) + return "nvcc", version + + # Detect Rust + if "rustc" in output: + version = VersionExtractor.extract_version("rust", full_output) + return "rustc", version + + # Detect TinyGo first (before regular Go) + if "tinygo" in output: + version = VersionExtractor.extract_version("tinygo", full_output) + return "tinygo", version + + # Detect Go + if "go version" in output or "gccgo" in output: + version = VersionExtractor.extract_version("go", full_output) + return "go" if "go version" in output else "gccgo", version + + # Detect Python + if "python" in output: + version = VersionExtractor.extract_version("python", full_output) + return "pypy" if "pypy" in output else "python", version + + # Detect Free Pascal + if "free pascal" in output or "fpc" in output: + version = VersionExtractor.extract_version("fpc", full_output) + return "fpc", version + + # Detect Kotlin + if "kotlinc" in output or "kotlin" in output: + version = VersionExtractor.extract_version("kotlin", full_output) + return "kotlin", version + + # Detect Zig + if "zig" in output: + version = VersionExtractor.extract_version("zig", full_output) + return "zig", version + + # Detect Dart + if "dart" in output: + version 
= VersionExtractor.extract_version("dart", full_output) + return "dart", version + + # Detect D language compilers + if "dmd" in output: + version = VersionExtractor.extract_version("dmd", full_output) + return "dmd", version + if "ldc" in output: + version = VersionExtractor.extract_version("ldc", full_output) + return "ldc", version + if "gdc" in output and "gnu d compiler" in output: + version = VersionExtractor.extract_version("gdc", full_output) + return "gdc", version + + # Detect Swift + if "swift" in output: + version = VersionExtractor.extract_version("swiftc", full_output) + return "swiftc", version + + # Detect Nim + if "nim" in output: + version = VersionExtractor.extract_version("nim", full_output) + return "nim", version + + # Detect Crystal + if "crystal" in output: + version = VersionExtractor.extract_version("crystal", full_output) + return "crystal", version + + # Detect V + if "v " in output or "vlang" in output: + version = VersionExtractor.extract_version("v", full_output) + return "v", version + + # Detect Haskell + if "ghc" in output or "haskell" in output: + version = VersionExtractor.extract_version("ghc", full_output) + return "ghc", version + + # Detect OCaml + if "ocaml" in output: + if "ocamlopt" in output: + version = VersionExtractor.extract_version("ocamlopt", full_output) + return "ocamlopt", version + else: + version = VersionExtractor.extract_version("ocamlc", full_output) + return "ocamlc", version + + # Detect Scala + if "scala" in output: + version = VersionExtractor.extract_version("scalac", full_output) + return "scalac", version + + # Detect C# / .NET + if "c# compiler" in output or "csharp" in output: + version = VersionExtractor.extract_version("csharp", full_output) + return "csharp", version + if "dotnet" in output: + version = VersionExtractor.extract_version("dotnet", full_output) + return "dotnet", version + + # Detect F# + if "f# compiler" in output or "fsharp" in output: + version = 
VersionExtractor.extract_version("fsharp", full_output) + return "fsharp", version + + # Detect Ruby + if "ruby" in output: + version = VersionExtractor.extract_version("ruby", full_output) + return "ruby", version + + # Detect Julia + if "julia" in output: + version = VersionExtractor.extract_version("julia", full_output) + return "julia", version + + # Detect Elixir + if "elixir" in output: + version = VersionExtractor.extract_version("elixir", full_output) + return "elixir", version + + # Detect Erlang + if "erlang" in output or "erlc" in output: + version = VersionExtractor.extract_version("erlc", full_output) + return "erlc", version + + # Detect Assembly tools + if "nasm" in output: + version = VersionExtractor.extract_version("nasm", full_output) + return "nasm", version + if "yasm" in output: + version = VersionExtractor.extract_version("yasm", full_output) + return "yasm", version + if "gnu assembler" in output: + version = VersionExtractor.extract_version("gas", full_output) + return "gas", version + + # Detect modern systems languages + if "carbon" in output: + version = VersionExtractor.extract_version("carbon", full_output) + return "carbon", version + if "mojo" in output: + version = VersionExtractor.extract_version("mojo", full_output) + return "mojo", version + if "odin" in output: + version = VersionExtractor.extract_version("odin", full_output) + return "odin", version + + # Detect Ada + if "gnatmake" in output or "ada" in output: + version = VersionExtractor.extract_version("gnatmake", full_output) + return "gnatmake", version + + # Detect COBOL + if "gnucobol" in output or "cobol" in output: + version = VersionExtractor.extract_version("gnucobol", full_output) + return "gnucobol", version + + return None, None + + def _extract_semver(self, version: Optional[str]) -> Optional[str]: + """Extract semantic version from version string.""" + return VersionExtractor.extract_semver(version) + + def _detect_target_platform(self, compiler_path: str, 
compiler_type: Optional[str]) -> Optional[str]: + """Detect the target platform of the compiler.""" + if not compiler_type: + return None + + # Try to get target info using -v flag + result = SubprocessRunner.run_with_timeout([compiler_path, "-v"], timeout=5) + if result: + # Look for Target: line in output + for line in (result.stdout + result.stderr).split("\n"): + if line.strip().startswith("Target:"): + target = line.split(":", 1)[1].strip() + return target + + return None + + def _is_cross_compiler(self, target: Optional[str]) -> bool: + """Determine if this is a cross-compiler based on target.""" + if not target: + return False + + # Get the host platform + host_machine = platform.machine().lower() + + # Normalize host architecture names + host_arch_map = { + "x86_64": ["x86_64", "amd64"], + "i386": ["i386", "i486", "i586", "i686"], + "aarch64": ["aarch64", "arm64"], + "armv7l": ["arm", "armv7"], + } + + # Find normalized host arch + normalized_host = host_machine + for norm_arch, variants in host_arch_map.items(): + if host_machine in variants: + normalized_host = norm_arch + break + + # Extract target architecture + target_parts = target.lower().split("-") + if not target_parts: + return False + + target_arch = target_parts[0] + + # Check if architectures match + for norm_arch, variants in host_arch_map.items(): + if normalized_host in variants and target_arch in variants: + return False + + # If architectures don't match, it's a cross-compiler + return target_arch != normalized_host + + def _generate_id( + self, compiler_type: Optional[str], version: Optional[str], compiler_name: str, language: str, target: Optional[str] = None + ) -> str: + """Generate a unique compiler ID.""" + parts = ["custom"] + + # Add target architecture for cross-compilers + if target: + arch = target.split("-")[0] + parts.append(arch) + + # Add language prefix for C to avoid conflicts with C++ + if language == "c" and compiler_type in ["gcc", "clang", "icc", "icx"]: + 
parts.append("c") + + # Add compiler type + if compiler_type: + parts.append(compiler_type) + + # Add version + if version: + version_part = version.replace(".", "-") + parts.append(version_part) + elif not compiler_type: + # Use sanitized compiler name as fallback + safe_name = re.sub(r"[^a-zA-Z0-9_-]", "-", compiler_name) + parts.append(safe_name) + + return "-".join(parts) + + def _generate_display_name( + self, compiler_type: Optional[str], version: Optional[str], compiler_name: str, target: Optional[str] = None + ) -> str: + """Generate a display name for the compiler.""" + type_display = { + "gcc": "GCC", + "win32-mingw-gcc": "MinGW GCC", + "clang": "Clang", + "win32-mingw-clang": "MinGW Clang", + "win32-vc": "MSVC", + "icc": "ICC", + "icx": "Intel ICX", + "ifx": "Intel IFX", + "ifort": "Intel Fortran", + "msvc": "MSVC", + "nvcc": "NVCC", + "rustc": "Rust", + "go": "Go", + "gccgo": "GCC Go", + "tinygo": "TinyGo", + "python": "Python", + "pypy": "PyPy", + "fpc": "Free Pascal", + "z88dk": "z88dk", + "zig": "Zig", + "dart": "Dart", + # Popular compiled languages + "dmd": "DMD", + "ldc": "LDC", + "gdc": "GDC", + "swiftc": "Swift", + "nim": "Nim", + "crystal": "Crystal", + "v": "V", + # Functional languages + "ghc": "GHC", + "ocamlc": "OCaml", + "ocamlopt": "OCaml", + "scalac": "Scala", + # .NET languages + "csharp": "C#", + "dotnet": ".NET", + "fsharp": "F#", + # Scripting/Dynamic languages + "ruby": "Ruby", + "julia": "Julia", + "elixir": "Elixir", + "erlc": "Erlang", + # Assembly and low-level + "nasm": "NASM", + "gas": "GAS", + "yasm": "YASM", + # Modern systems languages + "carbon": "Carbon", + "mojo": "Mojo", + "odin": "Odin", + "gnatmake": "Ada", + "gnucobol": "COBOL", + }.get(compiler_type or "", compiler_type.upper() if compiler_type else "") + + parts = [] + + # Add target architecture for cross-compilers + if target: + arch = target.split("-")[0].upper() + parts.append(arch) + + # Add compiler type and version + if compiler_type and version: + 
parts.append(f"{type_display} {version}") + elif compiler_type: + parts.append(type_display) + else: + parts.append(compiler_name) + + return " ".join(parts) + + def _detect_java_properties( + self, compiler_type: Optional[str], compiler_path: str + ) -> Tuple[Optional[str], Optional[str]]: + """Detect JAVA_HOME and runtime for Java-based compilers. + + Args: + compiler_type: Type of compiler (kotlin, etc.) + compiler_path: Path to the compiler executable + + Returns: + Tuple of (java_home, runtime) paths + """ + if compiler_type != "kotlin": + return None, None + + # For Kotlin, try to detect JAVA_HOME from environment or infer from common locations + java_home = os.environ.get("JAVA_HOME") + + if not java_home: + # Try to infer JAVA_HOME from common locations near the compiler + compiler_dir = Path(compiler_path).parent.parent + + # Look for JDK installations in the same parent directory + parent_dir = compiler_dir.parent + for potential_jdk in parent_dir.glob("jdk-*"): + if potential_jdk.is_dir() and (potential_jdk / "bin" / "java").exists(): + java_home = str(potential_jdk) + break + + # Determine runtime executable + runtime = None + if java_home: + java_exe = Path(java_home) / "bin" / "java" + if java_exe.exists(): + runtime = str(java_exe) + + return java_home, runtime + + def _detect_execution_wrapper(self, compiler_type: Optional[str], compiler_path: str) -> Optional[str]: + """Detect execution wrapper for compilers that need it. + + Args: + compiler_type: Type of compiler (dart, etc.) 
+ compiler_path: Path to the compiler executable + + Returns: + Path to execution wrapper if needed, None otherwise + """ + if compiler_type != "dart": + return None + + # For Dart, look for dartaotruntime in the same bin directory + compiler_dir = Path(compiler_path).parent + dartaotruntime_path = compiler_dir / "dartaotruntime" + + if dartaotruntime_path.exists() and dartaotruntime_path.is_file(): + return str(dartaotruntime_path) + + return None + + def _detect_msvc_paths(self, compiler_type: Optional[str], compiler_path: str, language: str) -> Tuple[Optional[str], Optional[str]]: + """Detect include and library paths for MSVC compilers. + + Args: + compiler_type: Type of compiler (should be "win32-vc" for MSVC) + compiler_path: Path to the compiler executable + + Returns: + Tuple of (include_path, lib_path) strings, or (None, None) if not MSVC + """ + if compiler_type != "win32-vc": + return None, None + + # Convert Windows backslashes to forward slashes for consistency + normalized_path = compiler_path.replace("\\", "/") + + # Extract the base MSVC directory from the compiler path + # Example: Z:/compilers/msvc/14.40.33807-14.40.33811.0/bin/Hostx64/x64/cl.exe + # Should extract: Z:/compilers/msvc/14.40.33807-14.40.33811.0 + + # Look for the pattern /bin/Host*/*/cl.exe and extract base directory + import re + match = re.search(r"^(.+)/bin/Host[^/]+/[^/]+/cl\.exe$", normalized_path, re.IGNORECASE) + if not match: + # Try alternative pattern for different MSVC layouts + match = re.search(r"^(.+)/bin/cl\.exe$", normalized_path, re.IGNORECASE) + + if not match: + self._debug_log(f"DEBUG: Could not extract MSVC base directory from path: {compiler_path}") + return None, None + + base_dir = match.group(1) + self._debug_log(f"DEBUG: Detected MSVC base directory: {base_dir}") + + # Detect architecture from the compiler path + arch = None + if "/hostx64/x64/" in normalized_path.lower(): + arch = "x64" + elif "/hostx86/x86/" in normalized_path.lower(): + arch = "x86" + 
elif "/hostx64/arm64/" in normalized_path.lower(): + arch = "arm64" + elif "/hostx86/arm/" in normalized_path.lower(): + arch = "arm" + else: + # Default to x64 if we can't detect + arch = "x64" + self._debug_log(f"DEBUG: Could not detect architecture from path, defaulting to x64") + + self._debug_log(f"DEBUG: Detected MSVC architecture: {arch}") + + # Build include path + include_path = f"{base_dir}/include" + + # Build library paths based on architecture + lib_paths = [ + f"{base_dir}/lib", + f"{base_dir}/lib/{arch}", + f"{base_dir}/atlmfc/lib/{arch}", + f"{base_dir}/ifc/{arch}" + ] + + lib_path = ";".join(lib_paths) + + # Detect Windows SDK paths from existing compilers + sdk_include_paths, sdk_lib_paths = self._detect_windows_sdk_paths(language, arch) + + # Combine MSVC paths with Windows SDK paths + if sdk_include_paths: + include_path = f"{include_path};{sdk_include_paths}" + self._debug_log(f"DEBUG: Added Windows SDK include paths: {sdk_include_paths}") + + if sdk_lib_paths: + lib_path = f"{lib_path};{sdk_lib_paths}" + self._debug_log(f"DEBUG: Added Windows SDK library paths: {sdk_lib_paths}") + else: + # Store info that SDK detection failed for later interactive prompting + self._debug_log("DEBUG: Windows SDK auto-detection failed - will prompt user in interactive mode") + + self._debug_log(f"DEBUG: Final MSVC include path: {include_path}") + self._debug_log(f"DEBUG: Final MSVC library paths: {lib_path}") + + return include_path, lib_path + + def _detect_windows_sdk_paths(self, language: str, arch: str) -> Tuple[Optional[str], Optional[str]]: + """Detect Windows SDK paths by scanning existing compiler configurations. 
+ + Args: + language: Programming language (e.g., "c++") + arch: Target architecture (e.g., "x64", "x86", "arm64") + + Returns: + Tuple of (sdk_include_paths, sdk_lib_paths) strings, or (None, None) if not found + """ + try: + from .utils import find_ce_config_directory + from .config_manager import ConfigManager + + # Create a temporary config manager to read existing properties + config_dir = find_ce_config_directory() + temp_config = ConfigManager(config_dir, "local", debug=self.debug) + properties_path = temp_config.get_properties_path(language) + + if not properties_path.exists(): + self._debug_log(f"DEBUG: Properties file not found: {properties_path}") + return None, None + + properties = temp_config.read_properties_file(properties_path) + + # Scan all compiler includePath properties for Windows SDK patterns + sdk_base_path = None + sdk_version = None + + for key, value in properties.items(): + if key.endswith(".includePath") and isinstance(value, str): + self._debug_log(f"DEBUG: Scanning includePath: {key} = {value}") + + # Look for pattern ending with /include//um + import re + match = re.search(r"([^;]+)/include/([^/;]+)/um(?:;|$)", value) + if match: + sdk_base_path = match.group(1) + sdk_version = match.group(2) + self._debug_log(f"DEBUG: Found Windows SDK: base={sdk_base_path}, version={sdk_version}") + break + + if not sdk_base_path or not sdk_version: + self._debug_log("DEBUG: No Windows SDK path found in existing compilers") + return None, None + + # Generate Windows SDK include paths + sdk_include_dirs = [ + f"{sdk_base_path}/include/{sdk_version}/cppwinrt", + f"{sdk_base_path}/include/{sdk_version}/shared", + f"{sdk_base_path}/include/{sdk_version}/ucrt", + f"{sdk_base_path}/include/{sdk_version}/um", + f"{sdk_base_path}/include/{sdk_version}/winrt" + ] + + sdk_include_paths = ";".join(sdk_include_dirs) + + # Generate Windows SDK library paths based on architecture + sdk_lib_dirs = [ + f"{sdk_base_path}/lib/{sdk_version}/ucrt/{arch}", + 
f"{sdk_base_path}/lib/{sdk_version}/um/{arch}" + ] + + sdk_lib_paths = ";".join(sdk_lib_dirs) + + self._debug_log(f"DEBUG: Generated SDK include paths: {sdk_include_paths}") + self._debug_log(f"DEBUG: Generated SDK library paths: {sdk_lib_paths}") + + return sdk_include_paths, sdk_lib_paths + + except Exception as e: + self._debug_log(f"DEBUG: Error detecting Windows SDK paths: {e}") + return None, None + + def set_windows_sdk_path(self, compiler_info: 'CompilerInfo', sdk_path: Optional[str]) -> 'CompilerInfo': + """Update MSVC compiler info with Windows SDK paths. + + Args: + compiler_info: CompilerInfo object for MSVC compiler + sdk_path: Optional Windows SDK base path (e.g., "Z:/compilers/windows-kits-10") + + Returns: + Updated CompilerInfo with SDK paths added + """ + if compiler_info.compiler_type != "win32-vc" or not sdk_path: + return compiler_info + + # Extract architecture from the compiler path + normalized_path = compiler_info.exe.replace("\\", "/") + arch = "x64" # default + if "/hostx64/x64/" in normalized_path.lower(): + arch = "x64" + elif "/hostx86/x86/" in normalized_path.lower(): + arch = "x86" + elif "/hostx64/arm64/" in normalized_path.lower(): + arch = "arm64" + elif "/hostx86/arm/" in normalized_path.lower(): + arch = "arm" + + # Find the SDK version by looking for the latest version directory + import os + from pathlib import Path + + sdk_base = Path(sdk_path.replace("\\", "/")) + sdk_version = None + + # Look for include directory with version subdirectories + include_dir = sdk_base / "include" + if include_dir.exists(): + # Find the latest version directory (highest version number) + version_dirs = [d.name for d in include_dir.iterdir() if d.is_dir() and d.name.startswith("10.")] + if version_dirs: + sdk_version = sorted(version_dirs, reverse=True)[0] # Get the latest version + self._debug_log(f"DEBUG: Found SDK version: {sdk_version}") + + if not sdk_version: + self._debug_log(f"DEBUG: No SDK version found in {include_dir}") + return 
compiler_info + + # Generate Windows SDK include paths + sdk_include_dirs = [ + f"{sdk_path}/include/{sdk_version}/cppwinrt", + f"{sdk_path}/include/{sdk_version}/shared", + f"{sdk_path}/include/{sdk_version}/ucrt", + f"{sdk_path}/include/{sdk_version}/um", + f"{sdk_path}/include/{sdk_version}/winrt" + ] + + sdk_include_paths = ";".join(sdk_include_dirs) + + # Generate Windows SDK library paths based on architecture + sdk_lib_dirs = [ + f"{sdk_path}/lib/{sdk_version}/ucrt/{arch}", + f"{sdk_path}/lib/{sdk_version}/um/{arch}" + ] + + sdk_lib_paths = ";".join(sdk_lib_dirs) + + # Combine with existing MSVC paths + if compiler_info.include_path: + compiler_info.include_path = f"{compiler_info.include_path};{sdk_include_paths}" + else: + compiler_info.include_path = sdk_include_paths + + if compiler_info.lib_path: + compiler_info.lib_path = f"{compiler_info.lib_path};{sdk_lib_paths}" + else: + compiler_info.lib_path = sdk_lib_paths + + self._debug_log(f"DEBUG: Added user-provided SDK paths from: {sdk_path}") + self._debug_log(f"DEBUG: SDK include paths: {sdk_include_paths}") + self._debug_log(f"DEBUG: SDK library paths: {sdk_lib_paths}") + + return compiler_info + + def _debug_log(self, message: str): + """Log debug message if debug mode is enabled.""" + if self.debug: + print(message) diff --git a/etc/scripts/ce-properties-wizard/ce_properties_wizard/config_manager.py b/etc/scripts/ce-properties-wizard/ce_properties_wizard/config_manager.py new file mode 100644 index 000000000..eb0c6cc82 --- /dev/null +++ b/etc/scripts/ce-properties-wizard/ce_properties_wizard/config_manager.py @@ -0,0 +1,1097 @@ +"""Configuration file management for CE Properties Wizard.""" + +import re +from collections import OrderedDict +from pathlib import Path +from typing import Dict, List, Optional, Set + +from .compiler_detector import LANGUAGE_CONFIGS +from .models import CompilerInfo +from .surgical_editor import PropertiesFileEditor +from .utils import ArchitectureMapper, create_backup, 
find_ce_lib_directory + + +def get_supported_instruction_sets() -> Set[str]: + """Dynamically extract all supported instruction sets from lib/instructionsets.ts.""" + instruction_sets = set() + + try: + lib_dir = find_ce_lib_directory() + instructionsets_file = lib_dir / "instructionsets.ts" + except FileNotFoundError: + # Return a minimal fallback set if we can't find the file + return {"amd64", "aarch64", "arm32", "x86", "sparc", "s390x", "powerpc", "mips", "riscv64", "riscv32"} + + try: + with open(instructionsets_file, "r", encoding="utf-8") as f: + content = f.read() + + # Look for instruction set definitions in the supported object + # Pattern: instructionSetName: { + pattern = r"(\w+):\s*{" + matches = re.findall(pattern, content) + for match in matches: + if match not in ["target", "path"]: # Skip property names + instruction_sets.add(match) + + except (IOError, UnicodeDecodeError): + # Return fallback set if file can't be read + return {"amd64", "aarch64", "arm32", "x86", "sparc", "s390x", "powerpc", "mips", "riscv64", "riscv32"} + + return instruction_sets + + +def detect_instruction_set_from_target(target: Optional[str], exe_path: str) -> str: + """Detect instruction set from target platform or executable path.""" + return ArchitectureMapper.detect_instruction_set(target, exe_path) + + +class ConfigManager: + """Manages reading and writing of compiler properties files.""" + + def __init__(self, config_dir: Path, env: str = "local", debug: bool = False): + """Initialize with path to etc/config directory and environment.""" + self.config_dir = config_dir + self.env = env + self.debug = debug + if not self.config_dir.exists(): + raise ValueError(f"Config directory not found: {config_dir}") + + def _debug_log(self, message: str): + """Log debug message if debug mode is enabled.""" + if self.debug: + print(message) + + def get_properties_path(self, language: str) -> Path: + """Get path to properties file for a language in the current environment.""" + if 
language not in LANGUAGE_CONFIGS: + raise ValueError(f"Unknown language: {language}") + + filename = LANGUAGE_CONFIGS[language].get_properties_file(self.env) + return self.config_dir / filename + + def get_local_properties_path(self, language: str) -> Path: + """Get path to local properties file for a language (for backward compatibility).""" + if language not in LANGUAGE_CONFIGS: + raise ValueError(f"Unknown language: {language}") + + filename = LANGUAGE_CONFIGS[language].properties_file + return self.config_dir / filename + + def read_properties_file(self, file_path: Path) -> OrderedDict: + """Read a properties file and return as ordered dict.""" + properties = OrderedDict() + + if not file_path.exists(): + return properties + + with open(file_path, "r", encoding="utf-8") as f: + for line_num, line in enumerate(f, 1): + line = line.strip() + + # Skip empty lines and comments + if not line or line.startswith("#"): + # Preserve comments and empty lines + properties[f"__comment_{line_num}__"] = line + continue + + # Parse key=value + match = re.match(r"^([^=]+)=(.*)$", line) + if match: + key, value = match.groups() + properties[key.strip()] = value.strip() + else: + # Preserve malformed lines as comments + properties[f"__comment_{line_num}__"] = f"# {line}" + + return properties + + def write_properties_file(self, file_path: Path, properties: OrderedDict): + """Write properties to file, preserving order and comments.""" + # Create backup if file exists + if file_path.exists(): + create_backup(file_path) + + with open(file_path, "w", encoding="utf-8") as f: + previous_key = None + lines_written = [] + + for key, value in properties.items(): + # Add empty line before group definitions (except the first entry) + if ( + previous_key is not None + and isinstance(key, str) + and key.startswith("group.") + and not (isinstance(previous_key, str) and previous_key.startswith("group.")) + ): + lines_written.append("") + + # Add empty line between different groups + elif ( + 
def get_existing_compiler_ids(self, language: str) -> Set[str]:
    """Return the set of compiler IDs already configured for *language*."""
    props_file = self.get_properties_path(language)
    if not props_file.exists():
        return set()

    props = self.read_properties_file(props_file)
    # A compiler is identified by the presence of its compiler.<id>.exe key.
    exe_key = re.compile(r"^compiler\.([^.]+)\.exe$")
    return {m.group(1) for m in map(exe_key.match, props) if m}


def get_existing_groups(self, properties: OrderedDict) -> Dict[str, List[str]]:
    """Map each group name to the compiler IDs listed in its group.<name>.compilers entry."""
    group_key = re.compile(r"^group\.([^.]+)\.compilers$")
    groups: Dict[str, List[str]] = {}
    for key, value in properties.items():
        m = group_key.match(key)
        if m:
            # Entries may be separated by ':', ';', ',' or whitespace.
            tokens = re.split(r"[:;,\s]+", value)
            groups[m.group(1)] = [tok.strip() for tok in tokens if tok.strip()]
    return groups
def _extract_compiler_version(self, compiler_exe: str) -> Optional[str]:
    """Best-effort extraction of a version string (x.y or x.y.z) for *compiler_exe*.

    Tries common version flags first, then falls back to parsing a
    ``gcc-<version>`` component out of the executable path.  Returns None
    when no version can be determined.
    """
    import subprocess

    try:
        # Fix: patterns anchored on the word "version" used to be listed
        # AFTER the bare number patterns, making them unreachable.  Check
        # them first so we pick the compiler's own version (e.g.
        # "clang version 17.0.6") rather than an unrelated number earlier
        # in the banner.
        version_patterns = [
            r"version\s+(\d+\.\d+\.\d+)",  # version x.y.z
            r"version\s+(\d+\.\d+)",       # version x.y
            r"(\d+\.\d+\.\d+)",            # any x.y.z
            r"(\d+\.\d+)",                 # any x.y
        ]
        for flag in ("--version", "-version", "-V"):
            try:
                result = subprocess.run([compiler_exe, flag], capture_output=True, text=True, timeout=10)
            except (subprocess.SubprocessError, FileNotFoundError):
                continue
            if result.returncode != 0 or not result.stdout:
                continue
            for pattern in version_patterns:
                match = re.search(pattern, result.stdout, re.IGNORECASE)
                if match:
                    return match.group(1)
            # Last resort: any dotted number on the first output line.
            numbers = re.findall(r"\d+\.\d+(?:\.\d+)?", result.stdout.split("\n")[0])
            if numbers:
                return numbers[0]

        # Running the binary told us nothing; try the install path, e.g.
        # /opt/compiler-explorer/gcc-14.1.0/bin/gfortran -> 14.1.0
        for path_pattern in (r"gcc-(\d+\.\d+\.\d+)", r"gcc-(\d+\.\d+)"):
            path_match = re.search(path_pattern, compiler_exe)
            if path_match:
                return path_match.group(1)
    except Exception:
        pass  # version detection is best-effort; never fail the caller

    return None


def ensure_compiler_id_unique(self, compiler_id: str, language: str) -> str:
    """Return *compiler_id*, suffixed with -2, -3, ... if it already exists for *language*."""
    existing_ids = self.get_existing_compiler_ids(language)
    if compiler_id not in existing_ids:
        return compiler_id

    for suffix in range(2, 100):
        candidate = f"{compiler_id}-{suffix}"
        if candidate not in existing_ids:
            return candidate

    # Practically unreachable (would need 98 duplicates); fall back to a timestamp.
    import time

    return f"{compiler_id}-{int(time.time())}"


def check_existing_compiler_by_path(self, compiler_exe: str, language: str) -> Optional[str]:
    """Check if a compiler with the same executable path already exists.

    Returns:
        The existing compiler ID if found, None otherwise.
    """
    file_path = self.get_properties_path(language)
    if not file_path.exists():
        return None

    editor = PropertiesFileEditor(file_path)

    from pathlib import Path

    # Path equality normalizes separators/trailing slashes, but does NOT
    # resolve symlinks or case-fold, so two spellings of the same binary
    # can still be missed.  NOTE(review): consider Path.resolve() if that
    # matters in practice.
    input_path = Path(compiler_exe)

    for line in editor.lines:
        if line.startswith("compiler.") and ".exe=" in line:
            match = re.match(r"^compiler\.([^.]+)\.exe=(.+)$", line)
            if match:
                existing_id, existing_exe = match.groups()
                if Path(existing_exe) == input_path:
                    return existing_id

    return None
def suggest_appropriate_group(
    self, compiler: CompilerInfo, existing_compiler_id: Optional[str] = None
) -> Optional[str]:
    """Suggest an appropriate group for a compiler based on existing groups.

    Args:
        compiler: The compiler information
        existing_compiler_id: If this is a duplicate, the ID of the existing compiler

    Returns:
        Suggested group name or None if no appropriate group found
    """
    file_path = self.get_properties_path(compiler.language)
    if not file_path.exists():
        return compiler.compiler_type  # Fallback to compiler type

    editor = PropertiesFileEditor(file_path)

    # If this is a duplicate, suggest the group the existing compiler already belongs to.
    if existing_compiler_id:
        for line in editor.lines:
            if line.startswith("group.") and ".compilers=" in line:
                match = re.match(r"^group\.([^.]+)\.compilers=(.*)$", line)
                if match:
                    # Fix: compare whole IDs, not substrings -- the previous
                    # ':<id>' / '=<id>' / endswith checks also matched IDs
                    # that merely start with existing_compiler_id
                    # (e.g. "gcc" matched a group containing "gcc12").
                    members = [c.strip() for c in match.group(2).split(":") if c.strip()]
                    if existing_compiler_id in members:
                        return match.group(1)

    # Determine the compiler's instruction set (target may be empty for native builds).
    target_instruction_set = detect_instruction_set_from_target(compiler.target or None, compiler.exe)

    self._debug_log(f"DEBUG: Compiler type: {compiler.compiler_type}")
    self._debug_log(f"DEBUG: Detected instruction set: {target_instruction_set}")
    self._debug_log(f"DEBUG: Compiler path: {compiler.exe}")

    # Collect existing groups and the properties relevant for scoring.
    existing_groups = {}
    for line in editor.lines:
        if line.startswith("group.") and ".compilers=" in line:
            match = re.match(r"^group\.([^.]+)\.compilers=", line)
            if match:
                existing_groups[match.group(1)] = {
                    "compilers": line.split("=", 1)[1],
                    "compiler_type": None,
                    "compiler_categories": None,
                    "instruction_set": None,
                    "group_name": None,
                }

    for line in editor.lines:
        for group_name in existing_groups:
            if line.startswith(f"group.{group_name}."):
                if ".compilerType=" in line:
                    existing_groups[group_name]["compiler_type"] = line.split("=", 1)[1]
                elif ".compilerCategories=" in line:
                    existing_groups[group_name]["compiler_categories"] = line.split("=", 1)[1]
                elif ".instructionSet=" in line:
                    existing_groups[group_name]["instruction_set"] = line.split("=", 1)[1]
                elif ".groupName=" in line:
                    existing_groups[group_name]["group_name"] = line.split("=", 1)[1]

    # Score groups based on compatibility.
    exe_lower = compiler.exe.lower()
    group_scores = []
    for group_name, group_info in existing_groups.items():
        score = 0

        # Matching instruction set is the strongest signal (architecture must match).
        if target_instruction_set and group_info["instruction_set"] == target_instruction_set:
            score += 200
            self._debug_log(f"DEBUG: Group {group_name} instruction set match (+200): {group_info['instruction_set']} == {target_instruction_set}")

        # MSVC host/target directory layout disambiguates x64/x86/arm64 groups.
        if compiler.compiler_type == "win32-vc" and "cl.exe" in exe_lower:
            if ("hostx64\\x64" in exe_lower or "/hostx64/x64" in exe_lower) and "x64" in group_name:
                score += 150
                self._debug_log(f"DEBUG: Group {group_name} MSVC x64 path match (+150)")
            elif ("hostx86\\x86" in exe_lower or "/hostx86/x86" in exe_lower) and "x86" in group_name:
                score += 150
                self._debug_log(f"DEBUG: Group {group_name} MSVC x86 path match (+150)")
            elif ("hostx64\\arm64" in exe_lower or "/hostx64/arm64" in exe_lower) and "arm64" in group_name:
                score += 150
                self._debug_log(f"DEBUG: Group {group_name} MSVC arm64 path match (+150)")

        # Match compiler type / categories (high priority).
        if group_info["compiler_type"] == compiler.compiler_type:
            score += 100
            self._debug_log(f"DEBUG: Group {group_name} compiler type match (+100): {group_info['compiler_type']} == {compiler.compiler_type}")
        elif group_info["compiler_categories"] == compiler.compiler_type:
            score += 100
            self._debug_log(f"DEBUG: Group {group_name} compiler categories match (+100): {group_info['compiler_categories']} == {compiler.compiler_type}")
        elif compiler.compiler_type and compiler.compiler_type.lower() in group_name.lower():
            score += 80
            self._debug_log(f"DEBUG: Group {group_name} name contains compiler type (+80): {compiler.compiler_type} in {group_name}")

        # Match target architecture in group name (medium priority).
        if compiler.target and compiler.is_cross_compiler:
            target_arch = compiler.target.split("-")[0].lower()
            if group_info["group_name"] and target_arch in group_info["group_name"].lower():
                score += 70
            elif target_arch in group_name.lower():
                score += 60

        # Prefer groups with similar naming patterns (low priority).
        if compiler.compiler_type and group_name.lower().startswith(compiler.compiler_type.lower()):
            score += 30

        # Native compilers prefer groups without cross-architecture markers.
        if not compiler.is_cross_compiler:
            cross_indicators = ["arm", "aarch64", "mips", "sparc", "powerpc", "riscv", "s390x"]
            if not any(arch in group_name.lower() for arch in cross_indicators):
                score += 20

        # Larger (more established) groups get a small boost.
        if group_info["compilers"]:
            compiler_count = len([c for c in group_info["compilers"].split(":") if c.strip()])
            if compiler_count > 10:
                score += 15
            elif compiler_count > 5:
                score += 10
            elif compiler_count > 1:
                score += 5

        # Fix: log the total only after ALL criteria have been applied;
        # previously this fired before the arch/name/size bonuses, so the
        # logged "total" was wrong for most groups.
        self._debug_log(f"DEBUG: Group {group_name} total score: {score}")

        if score > 0:
            group_scores.append((score, group_name))

    # Return the highest scoring group.
    if group_scores:
        group_scores.sort(reverse=True)
        return group_scores[0][1]

    # Fallback: derive a new group name from compiler type and architecture.
    if compiler.is_cross_compiler and compiler.target:
        arch = compiler.target.split("-")[0]
        return f"{compiler.compiler_type or 'compiler'}{arch}"
    return compiler.compiler_type or "custom"
def add_compiler(self, compiler: CompilerInfo):
    """Add *compiler* to the configuration via minimal, surgical edits."""
    target_file = self.get_properties_path(compiler.language)

    # Resolve ID collisions and make sure a semver is present before writing.
    compiler.id = self.ensure_compiler_id_unique(compiler.id, compiler.language)
    if not compiler.semver:
        compiler.semver = self._extract_compiler_version(compiler.exe)

    editor = PropertiesFileEditor(target_file)

    if compiler.group:
        editor.add_group_to_compilers_line(compiler.group)
        if editor.group_exists(compiler.group):
            editor.add_compiler_to_group(compiler.group, compiler.id)
        else:
            editor.create_group_section(compiler.group, [compiler.id])
        # Fill in any group-level properties the group is still missing.
        self._add_group_properties_surgical(editor, compiler.group, compiler)

    editor.create_compiler_section(compiler)
    editor.ensure_proper_spacing_after_compiler(compiler.id)
    editor.ensure_libs_tools_sections()
    editor.save_file()


def _add_group_properties_surgical(self, editor: PropertiesFileEditor, group_name: str, compiler: CompilerInfo):
    """Populate group-level properties (naming, instruction set, family defaults) for *group_name*."""
    editor.add_group_property(group_name, "isSemVer", "true")

    # Naming and instruction-set properties differ for cross vs native builds.
    if compiler and compiler.is_cross_compiler and compiler.target:
        arch = compiler.target.split("-")[0]
        type_name = compiler.compiler_type or "compiler"
        isa = detect_instruction_set_from_target(compiler.target, compiler.exe)
        naming = [("groupName", f"{type_name.title()} {arch}"), ("baseName", f"{arch} {type_name}")]
    else:
        type_name = compiler.compiler_type if compiler else group_name
        isa = detect_instruction_set_from_target(None, compiler.exe if compiler else "")
        naming = [("groupName", f"{type_name.title()}"), ("baseName", type_name)]
    for prop, value in naming + [("instructionSet", isa)]:
        editor.add_group_property(group_name, prop, value)

    # Well-known compiler families get extra defaults (order preserved).
    family = compiler.compiler_type if compiler else None
    if group_name == "gcc" or family == "gcc":
        extras = [("compilerType", "gcc"), ("compilerCategories", "gcc")]
    elif group_name == "clang" or family == "clang":
        extras = [
            ("compilerType", "clang"),
            ("compilerCategories", "clang"),
            ("intelAsm", "-mllvm --x86-asm-syntax=intel"),
        ]
    elif group_name in ["icc", "icx"] or family in ["icc", "icx"]:
        extras = [("compilerType", family if compiler else group_name), ("compilerCategories", "intel")]
    elif group_name == "win32-vc" or family == "win32-vc":
        # MSVC-specific properties
        extras = [
            ("compilerType", "win32-vc"),
            ("compilerCategories", "msvc"),
            ("versionFlag", "/?"),
            ("versionRe", "^.*Microsoft \\(R\\).*$"),
            ("needsMulti", "false"),
            ("includeFlag", "/I"),
            ("options", "/EHsc /utf-8 /MD"),
        ]
    elif family:
        extras = [("compilerType", family)]
    else:
        extras = []
    for prop, value in extras:
        editor.add_group_property(group_name, prop, value)
def _add_to_group(
    self, properties: OrderedDict, group_name: str, compiler_id: str, compiler: Optional[CompilerInfo] = None
):
    """Add compiler to a group, creating group if necessary."""
    group_key = f"group.{group_name}.compilers"

    groups = self.get_existing_groups(properties)

    if group_name in groups:
        # Add to existing group if not already there (colon-separated list).
        if compiler_id not in groups[group_name]:
            groups[group_name].append(compiler_id)
            properties[group_key] = ":".join(groups[group_name])
    else:
        # Create new group
        properties[group_key] = compiler_id

        # Always add isSemVer=true for new groups
        properties[f"group.{group_name}.isSemVer"] = "true"

        if compiler and compiler.is_cross_compiler and compiler.target:
            # For cross-compilers, extract arch from the target triple.
            arch = compiler.target.split("-")[0]
            compiler_type = compiler.compiler_type or "compiler"
            instruction_set = detect_instruction_set_from_target(compiler.target, compiler.exe)
            properties[f"group.{group_name}.groupName"] = f"{compiler_type.title()} {arch}"
            properties[f"group.{group_name}.baseName"] = f"{arch} {compiler_type}"
            properties[f"group.{group_name}.instructionSet"] = instruction_set
        else:
            # Native compilers: detect the instruction set from the exe path.
            compiler_type = compiler.compiler_type if compiler else group_name
            instruction_set = detect_instruction_set_from_target(None, compiler.exe if compiler else "")
            properties[f"group.{group_name}.groupName"] = f"{compiler_type.title()}"
            properties[f"group.{group_name}.baseName"] = compiler_type
            properties[f"group.{group_name}.instructionSet"] = instruction_set

        # Family-specific defaults.
        if group_name == "gcc" or (compiler and compiler.compiler_type == "gcc"):
            properties[f"group.{group_name}.compilerType"] = "gcc"
            properties[f"group.{group_name}.compilerCategories"] = "gcc"
        elif group_name == "clang" or (compiler and compiler.compiler_type == "clang"):
            properties[f"group.{group_name}.compilerType"] = "clang"
            properties[f"group.{group_name}.compilerCategories"] = "clang"
            properties[f"group.{group_name}.intelAsm"] = "-mllvm --x86-asm-syntax=intel"
        elif group_name in ["icc", "icx"] or (compiler and compiler.compiler_type in ["icc", "icx"]):
            properties[f"group.{group_name}.compilerType"] = compiler.compiler_type if compiler else group_name
            properties[f"group.{group_name}.compilerCategories"] = "intel"
        elif group_name == "win32-vc" or (compiler and compiler.compiler_type == "win32-vc"):
            # Consistency fix: mirror the MSVC defaults emitted by
            # _add_group_properties_surgical; this legacy path previously
            # created win32-vc groups without them.
            properties[f"group.{group_name}.compilerType"] = "win32-vc"
            properties[f"group.{group_name}.compilerCategories"] = "msvc"
            properties[f"group.{group_name}.versionFlag"] = "/?"
            properties[f"group.{group_name}.versionRe"] = "^.*Microsoft \\(R\\).*$"
            properties[f"group.{group_name}.needsMulti"] = "false"
            properties[f"group.{group_name}.includeFlag"] = "/I"
            properties[f"group.{group_name}.options"] = "/EHsc /utf-8 /MD"
        elif compiler and compiler.compiler_type:
            # For other known compiler types
            properties[f"group.{group_name}.compilerType"] = compiler.compiler_type
def _reorganize_properties(self, properties: OrderedDict):
    """Rewrite *properties* in canonical order.

    Canonical layout: the top-level ``compilers`` line, general settings,
    group definitions (with required fields filled in), compiler
    definitions (with ``semver`` added and redundant ``name`` dropped),
    then the ``libs``/``tools`` sections and trailing comments.
    """
    # Find all groups defined in this file.
    groups = set()
    for key in properties:
        if isinstance(key, str) and key.startswith("group.") and key.endswith(".compilers"):
            groups.add(key.split(".")[1])

    new_properties = OrderedDict()

    # 1. Top-level compilers line.  Fix: this used to be rebuilt from the
    # local groups alone, silently dropping bare compiler ids (or refs to
    # groups defined elsewhere) that were already on the compilers line --
    # and dropping the line entirely when the file had no local groups.
    existing_entries = [e for e in properties.get("compilers", "").split(":") if e.strip()]
    group_refs = [f"&{group}" for group in sorted(groups)]
    merged_entries = existing_entries + [ref for ref in group_refs if ref not in existing_entries]
    if merged_entries:
        new_properties["compilers"] = ":".join(merged_entries)

    # 2. General settings (defaultCompiler, objdumper, ...).  libs/tools and
    # the internal "__" marker keys are re-emitted in their own sections
    # below (fix: previously only "__comment_" keys were excluded here, so
    # pre-existing "__libs_section_"/"__tools_section_" markers leaked into
    # the middle of the file).
    for key, value in properties.items():
        if (
            key != "compilers"
            and key not in ("libs", "tools")
            and not key.startswith("group.")
            and not key.startswith("compiler.")
            and not key.startswith("__")
        ):
            new_properties[key] = value

    # 3. Group definitions, ensuring the required fields exist.
    group_names_processed = set()
    for key, value in properties.items():
        if key.startswith("group."):
            new_properties[key] = value

            match = re.match(r"^group\.([^.]+)\.compilers$", key)
            if match:
                group_name = match.group(1)
                group_names_processed.add(group_name)

                if f"group.{group_name}.isSemVer" not in properties:
                    new_properties[f"group.{group_name}.isSemVer"] = "true"

                # Use one member compiler as a sample to derive naming and
                # instruction-set fields for the group.
                compiler_ids = [c.strip() for c in value.split(":") if c.strip()]
                sample_compiler_exe = None
                sample_compiler_type = None
                sample_target = None

                for compiler_id in compiler_ids:
                    exe_key = f"compiler.{compiler_id}.exe"
                    type_key = f"compiler.{compiler_id}.compilerType"
                    if exe_key in properties:
                        sample_compiler_exe = properties[exe_key]
                        sample_compiler_type = properties.get(type_key, group_name)
                        # Heuristic: a target triple in the exe path means cross-compiler.
                        if "-" in sample_compiler_exe and any(
                            arch in sample_compiler_exe
                            for arch in ["s390x", "sparc", "aarch64", "arm", "mips", "powerpc"]
                        ):
                            for part in sample_compiler_exe.split("/"):
                                if "-" in part and any(
                                    arch in part for arch in ["s390x", "sparc", "aarch64", "arm", "mips", "powerpc"]
                                ):
                                    sample_target = part
                                    break
                        break

                if f"group.{group_name}.groupName" not in properties and sample_compiler_type:
                    if sample_target:
                        arch = sample_target.split("-")[0] if sample_target else "unknown"
                        new_properties[f"group.{group_name}.groupName"] = f"{sample_compiler_type.title()} {arch}"
                    else:
                        new_properties[f"group.{group_name}.groupName"] = f"{sample_compiler_type.title()}"

                if f"group.{group_name}.baseName" not in properties and sample_compiler_type:
                    if sample_target:
                        arch = sample_target.split("-")[0] if sample_target else "unknown"
                        new_properties[f"group.{group_name}.baseName"] = f"{arch} {sample_compiler_type}"
                    else:
                        new_properties[f"group.{group_name}.baseName"] = sample_compiler_type

                if f"group.{group_name}.instructionSet" not in properties and sample_compiler_exe:
                    instruction_set = detect_instruction_set_from_target(sample_target, sample_compiler_exe)
                    new_properties[f"group.{group_name}.instructionSet"] = instruction_set

    # 4. Compiler definitions: collect/derive semvers, then emit, dropping
    # name= where a semver makes it redundant.
    compiler_semvers = {}
    for key, value in properties.items():
        if key.startswith("compiler."):
            match = re.match(r"^compiler\.([^.]+)\.(.+)$", key)
            if match:
                compiler_id, prop_type = match.groups()
                if prop_type == "semver":
                    compiler_semvers[compiler_id] = value
                elif prop_type == "exe" and f"compiler.{compiler_id}.semver" not in properties:
                    semver = self._extract_compiler_version(value)
                    if semver:
                        compiler_semvers[compiler_id] = semver

    for key, value in properties.items():
        if key.startswith("compiler."):
            match = re.match(r"^compiler\.([^.]+)\.(.+)$", key)
            if match:
                compiler_id, prop_type = match.groups()
                if prop_type == "name" and compiler_id in compiler_semvers:
                    continue  # the semver renders the explicit name redundant
                new_properties[key] = value

    for compiler_id, semver in compiler_semvers.items():
        semver_key = f"compiler.{compiler_id}.semver"
        if semver_key not in new_properties:
            new_properties[semver_key] = semver

    # 5. libs= and tools= at the end, each with its section banner
    # (any stale banner comments are filtered out in step 6).
    new_properties["__libs_section_empty__"] = ""
    new_properties["__libs_section_border1__"] = "#################################"
    new_properties["__libs_section_border2__"] = "#################################"
    new_properties["__libs_section_title__"] = "# Installed libs"
    new_properties["libs"] = properties.get("libs", "")

    new_properties["__tools_section_empty__"] = ""
    new_properties["__tools_section_border1__"] = "#################################"
    new_properties["__tools_section_border2__"] = "#################################"
    new_properties["__tools_section_title__"] = "# Installed tools"
    new_properties["tools"] = properties.get("tools", "")

    # 6. Remaining comments (excluding duplicated section banners / blanks).
    for key, value in properties.items():
        if (
            key.startswith("__comment_")
            and not key.startswith("__comment_libs_header")
            and "# Installed libs" not in str(value)
            and "# Installed tools" not in str(value)
            and "#################################" not in str(value)
            and str(value).strip() != ""
        ):
            new_properties[key] = value

    # Replace the contents in place so callers holding the dict see the result.
    properties.clear()
    properties.update(new_properties)


def reorganize_existing_file(self, language: str):
    """Add missing properties to an existing file using surgical editing."""
    file_path = self.get_properties_path(language)
    if not file_path.exists():
        return

    editor = PropertiesFileEditor(file_path)
    self._add_missing_semver_fields_surgical(editor)
    self._add_missing_group_properties_surgical(editor)
    editor.ensure_libs_tools_sections()
    editor.save_file()
def _add_missing_semver_fields_surgical(self, editor: PropertiesFileEditor):
    """Add missing semver fields to existing compilers using surgical editing."""
    # Fix: snapshot the compiler entries first.  The loop below inserts
    # (add_compiler_property) and removes (pop) lines, and mutating
    # editor.lines while enumerate() iterates it skips or re-processes entries.
    exe_entries = []
    for line in editor.lines:
        if line.startswith("compiler.") and ".exe=" in line:
            match = re.match(r"^compiler\.([^.]+)\.exe=(.+)$", line)
            if match:
                exe_entries.append(match.groups())

    for compiler_id, exe_path in exe_entries:
        semver_prefix = f"compiler.{compiler_id}.semver="
        if not any(line.startswith(semver_prefix) for line in editor.lines):
            semver = self._extract_compiler_version(exe_path)
            if semver:
                editor.add_compiler_property(compiler_id, "semver", semver)

        # Drop name= once a semver exists (mirrors _reorganize_properties,
        # which treats name as redundant when semver is present).
        if any(line.startswith(semver_prefix) for line in editor.lines):
            name_prefix = f"compiler.{compiler_id}.name="
            for j, candidate in enumerate(editor.lines):
                if candidate.startswith(name_prefix):
                    editor.lines.pop(j)
                    break


def _add_missing_group_properties_surgical(self, editor: PropertiesFileEditor):
    """Add missing group properties to existing groups using surgical editing."""
    # Fix: snapshot first for the same reason as above -- add_group_property
    # mutates editor.lines while it is being iterated.
    group_entries = []
    for line in editor.lines:
        if line.startswith("group.") and ".compilers=" in line:
            match = re.match(r"^group\.([^.]+)\.compilers=(.*)$", line)
            if match:
                group_entries.append(match.groups())

    cross_indicators = ["s390x", "sparc", "aarch64", "arm", "mips", "powerpc"]

    for group_name, compilers_list in group_entries:
        editor.add_group_property(group_name, "isSemVer", "true")

        if not compilers_list:
            continue

        # Derive group metadata from the first listed compiler.
        first_compiler_id = compilers_list.split(":")[0].strip()
        if first_compiler_id.startswith("&"):
            first_compiler_id = first_compiler_id[1:]

        compiler_exe = None
        compiler_type = None
        target = None

        for exe_line in editor.lines:
            if exe_line.startswith(f"compiler.{first_compiler_id}.exe="):
                compiler_exe = exe_line.split("=", 1)[1]
                break

        for type_line in editor.lines:
            if type_line.startswith(f"compiler.{first_compiler_id}.compilerType="):
                compiler_type = type_line.split("=", 1)[1]
                break

        # Heuristic: a target triple in the exe path marks a cross-compiler.
        is_cross = False
        if compiler_exe and "-" in compiler_exe:
            if any(arch in compiler_exe for arch in cross_indicators):
                is_cross = True
                for part in compiler_exe.split("/"):
                    if "-" in part and any(arch in part for arch in cross_indicators):
                        target = part
                        break

        comp_type = compiler_type or group_name
        if is_cross and target:
            # Cross-compiler
            arch = target.split("-")[0] if target else "unknown"
            editor.add_group_property(group_name, "groupName", f"{comp_type.title()} {arch}")
            editor.add_group_property(group_name, "baseName", f"{arch} {comp_type}")
            instruction_set = detect_instruction_set_from_target(target, compiler_exe or "")
            editor.add_group_property(group_name, "instructionSet", instruction_set)
        else:
            # Native compiler
            editor.add_group_property(group_name, "groupName", f"{comp_type.title()}")
            editor.add_group_property(group_name, "baseName", comp_type)
            instruction_set = detect_instruction_set_from_target(None, compiler_exe or "")
            editor.add_group_property(group_name, "instructionSet", instruction_set)

        # Family-specific defaults.
        if comp_type == "gcc" or group_name == "gcc":
            editor.add_group_property(group_name, "compilerType", "gcc")
            editor.add_group_property(group_name, "compilerCategories", "gcc")
        elif comp_type == "clang" or group_name == "clang":
            editor.add_group_property(group_name, "compilerType", "clang")
            editor.add_group_property(group_name, "compilerCategories", "clang")
            editor.add_group_property(group_name, "intelAsm", "-mllvm --x86-asm-syntax=intel")
        elif comp_type in ["icc", "icx"] or group_name in ["icc", "icx"]:
            editor.add_group_property(group_name, "compilerType", comp_type or group_name)
            editor.add_group_property(group_name, "compilerCategories", "intel")
        elif comp_type:
            editor.add_group_property(group_name, "compilerType", comp_type)
def validate_with_discovery(self, language: str, compiler_id: str) -> tuple[bool, str, Optional[str]]:
    """Validate that the compiler is discovered by running npm run dev with discovery-only.

    Returns:
        (ok, message, discovered_semver) -- semver is taken from the
        discovery output when available.
    """
    import json
    import os
    import platform  # fix: previously imported twice inside this method
    import subprocess
    import tempfile

    # A *.local.properties file shadows this environment's file, so a
    # discovery run would not exercise the change -- skip in that case.
    if self.env != "local":
        local_file = self.get_local_properties_path(language)
        if local_file.exists():
            return (
                True,
                f"Skipping discovery validation for {self.env} environment because {local_file.name} exists",
                None,
            )

    # Create a temporary file for the discovery output.
    with tempfile.NamedTemporaryFile(mode="w+", suffix=".json", delete=False) as f:
        discovery_file = f.name

    try:
        # Find the main CE directory (go up from etc/config to the root).
        ce_root = self.config_dir.parent.parent

        # On Windows npm is a .cmd shim.
        npm_cmd = "npm.cmd" if platform.system() == "Windows" else "npm"
        cmd = [npm_cmd, "run", "dev", "--", "--language", language]
        if self.env != "local":
            cmd.extend(["--env", self.env])
        cmd.extend(["--discovery-only", discovery_file])

        print(f"DEBUG: Running discovery command: {' '.join(cmd)}")
        print(f"DEBUG: Working directory: {ce_root}")

        result = subprocess.run(
            cmd, cwd=ce_root, capture_output=True, text=True, timeout=60  # Discovery should be relatively fast
        )

        if result.returncode != 0:
            error_msg = result.stderr.strip() or result.stdout.strip()
            # On Windows, if npm fails due to PATH issues, make discovery optional.
            if platform.system() == "Windows" and ("Is a directory" in error_msg or "not found" in error_msg):
                return True, f"Discovery validation skipped on Windows (npm PATH issue): {error_msg}", None
            return False, f"Discovery command failed: {error_msg}", None

        if not os.path.exists(discovery_file):
            return False, f"Discovery file not created: {discovery_file}", None

        with open(discovery_file, "r") as f:
            discovery_data = json.load(f)

        # Be defensive about the JSON shape: either a dict with a
        # "compilers" list, or a bare list.
        compilers = []
        if isinstance(discovery_data, dict):
            compilers = discovery_data.get("compilers", [])
        elif isinstance(discovery_data, list):
            compilers = discovery_data

        found_compiler = None
        for compiler in compilers:
            if isinstance(compiler, dict):
                if compiler.get("id") == compiler_id:
                    found_compiler = compiler
                    break
            elif isinstance(compiler, str):
                if compiler == compiler_id:
                    found_compiler = {"id": compiler, "name": compiler}
                    break

        if found_compiler:
            compiler_name = found_compiler.get("name", found_compiler.get("id", "unknown"))
            discovered_semver = found_compiler.get("semver") or found_compiler.get("version")
            return True, f"Compiler '{compiler_id}' successfully discovered as '{compiler_name}'", discovered_semver

        # Not found: summarize what WAS discovered for debugging.
        available_ids = []
        for c in compilers[:10]:  # First 10 for brevity
            if isinstance(c, dict):
                available_ids.append(c.get("id", "unknown"))
            elif isinstance(c, str):
                available_ids.append(c)
            else:
                available_ids.append(str(c))

        data_repr = str(discovery_data)  # fix: compute once instead of twice
        data_preview = data_repr[:200] + "..." if len(data_repr) > 200 else data_repr
        return (
            False,
            f"Compiler '{compiler_id}' not found in discovery results. "
            f"Available IDs (first 10): {available_ids}. "
            f"Data structure preview: {data_preview}",
            None,
        )

    except subprocess.TimeoutExpired:
        return False, "Discovery validation timed out (60s)", None
    except json.JSONDecodeError as e:
        return False, f"Discovery JSON parse error: {str(e)}", None
    except Exception as e:
        # On Windows, npm may simply not be on PATH; treat discovery as optional there.
        if platform.system() == "Windows" and (
            "The system cannot find the file specified" in str(e) or "WinError 2" in str(e)
        ):
            return True, f"Discovery validation skipped on Windows (npm not found): {str(e)}", None
        return False, f"Discovery validation error: {str(e)}", None
    finally:
        try:
            if os.path.exists(discovery_file):
                os.unlink(discovery_file)
        except OSError:
            pass  # Ignore cleanup errors


def validate_with_propscheck(self, language: str) -> tuple[bool, str]:
    """Validate properties file with propscheck.py."""
    propscheck_path = self.config_dir.parent / "scripts" / "util" / "propscheck.py"
    if not propscheck_path.exists():
        return True, "Warning: propscheck.py not found, skipping validation"

    file_path = self.get_properties_path(language)
    if not file_path.exists():
        return True, f"No {self.env} properties file to validate"

    import subprocess
    import sys

    try:
        # propscheck.py takes --config-dir, plus --check-local for local
        # properties files.  Use the interpreter running this script.
        cmd = [sys.executable, str(propscheck_path), "--config-dir", str(self.config_dir)]
        # NOTE(review): --check-local is passed for every environment (both
        # branches of the old if/else appended it); non-local envs may need
        # a different flag -- confirm against propscheck.py's CLI.
        cmd.append("--check-local")

        print(f"DEBUG: Running propscheck command: {' '.join(cmd)}")
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)

        if result.returncode == 0:
            return True, "Properties validated successfully"
        error_output = result.stdout + result.stderr
        # Always return the validation output so we can learn from the issues.
        return False, f"Validation issues detected:\n{error_output}"

    except subprocess.TimeoutExpired:
        return False, "Validation timed out"
    except Exception as e:
        return False, f"Validation error: {str(e)}"
timeout=10) + + if result.returncode == 0: + return True, "Properties validated successfully" + else: + error_output = result.stdout + result.stderr + # Always return the validation output so we can learn from the issues + return False, f"Validation issues detected:\n{error_output}" + + except subprocess.TimeoutExpired: + return False, "Validation timed out" + except Exception as e: + return False, f"Validation error: {str(e)}" diff --git a/etc/scripts/ce-properties-wizard/ce_properties_wizard/main.py b/etc/scripts/ce-properties-wizard/ce_properties_wizard/main.py new file mode 100644 index 000000000..919fd5fa8 --- /dev/null +++ b/etc/scripts/ce-properties-wizard/ce_properties_wizard/main.py @@ -0,0 +1,541 @@ +"""Main CLI entry point for CE Properties Wizard.""" + +import os +import shlex +import sys +from pathlib import Path +from typing import Optional + +import click +import inquirer +from colorama import Fore, Style, init + +from .compiler_detector import LANGUAGE_CONFIGS, CompilerDetector, get_supported_compiler_types +from .config_manager import ConfigManager +from .models import CompilerInfo +from .utils import find_ce_config_directory + +# Initialize colorama for cross-platform color support +init(autoreset=True) + + +def print_success(message: str): + """Print success message in green.""" + click.echo(f"{Fore.GREEN}✓ {message}{Style.RESET_ALL}") + + +def print_error(message: str): + """Print error message in red.""" + click.echo(f"{Fore.RED}✗ {message}{Style.RESET_ALL}", err=True) + + +def print_info(message: str): + """Print info message in blue.""" + click.echo(f"{Fore.BLUE}ℹ {message}{Style.RESET_ALL}") + + +def print_warning(message: str): + """Print warning message in yellow.""" + click.echo(f"{Fore.YELLOW}⚠ {message}{Style.RESET_ALL}") + + +def format_compiler_options(options_input: str) -> str: + """Format compiler options properly. + + Takes space-separated options and quotes any that contain spaces. 
+ + Args: + options_input: Raw options string from user input + + Returns: + Properly formatted options string with quoted options containing spaces + """ + if not options_input or not options_input.strip(): + return "" + + # Split by spaces but respect quoted strings + try: + options = shlex.split(options_input) + except ValueError: + # If shlex fails (unmatched quotes), fall back to simple split + options = options_input.split() + + # Format each option - quote it if it contains spaces + formatted_options = [] + for opt in options: + opt = opt.strip() + if opt: + if " " in opt and not (opt.startswith('"') and opt.endswith('"')): + formatted_options.append(f'"{opt}"') + else: + formatted_options.append(opt) + + return " ".join(formatted_options) + + +@click.command() +@click.argument("compiler_path", required=False) +@click.option("--id", "compiler_id", help="Compiler ID (auto-generated if not specified)") +@click.option("--name", "display_name", help="Display name for the compiler") +@click.option("--group", help="Compiler group to add to") +@click.option("--options", help="Default compiler options") +@click.option("--language", help="Programming language (auto-detected if not specified)") +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompts") +@click.option("--non-interactive", is_flag=True, help="Run in non-interactive mode with auto-detected values") +@click.option("--config-dir", type=click.Path(exists=True), help="Path to etc/config directory") +@click.option("--verify-only", is_flag=True, help="Only detect and display compiler information without making changes") +@click.option("--list-types", is_flag=True, help="List all supported compiler types and exit") +@click.option("--reorganize", help="Reorganize an existing properties file for the specified language") +@click.option( + "--validate-discovery", + is_flag=True, + help="Run discovery validation to verify the compiler is detected (default for local environment)", +) 
+@click.option("--env", default="local", help="Environment to target (local, amazon, etc.)") +@click.option("--debug", is_flag=True, help="Enable debug output including subprocess commands") +def cli( + compiler_path: Optional[str], + compiler_id: Optional[str], + display_name: Optional[str], + group: Optional[str], + options: Optional[str], + language: Optional[str], + yes: bool, + non_interactive: bool, + config_dir: Optional[str], + verify_only: bool, + list_types: bool, + reorganize: Optional[str], + validate_discovery: bool, + env: str, + debug: bool, +): + """CE Properties Wizard - Add compilers to your Compiler Explorer installation. + + Examples: + ce-props-wizard # Interactive mode (local environment) + ce-props-wizard /usr/bin/g++-13 # Path-first mode + ce-props-wizard /usr/bin/g++-13 --yes # Automated mode + ce-props-wizard --env amazon /usr/bin/g++ # Target amazon environment + ce-props-wizard --list-types # List all supported compiler types + ce-props-wizard /usr/bin/g++ --verify-only # Just detect compiler info + """ + # Handle --list-types flag + if list_types: + try: + supported_types = get_supported_compiler_types() + click.echo(f"Found {len(supported_types)} supported compiler types:\n") + for compiler_type in sorted(supported_types): + click.echo(compiler_type) + sys.exit(0) + except Exception as e: + print_error(f"Error reading compiler types: {e}") + sys.exit(1) + + # Handle --reorganize flag + if reorganize: + try: + # Find config directory + if config_dir: + config_mgr = ConfigManager(Path(config_dir), env, debug=debug) + else: + config_mgr = ConfigManager(find_ce_config_directory(), env, debug=debug) + + print_info(f"Reorganizing {reorganize} properties file...") + + # Check if language is valid + if reorganize not in LANGUAGE_CONFIGS: + print_error(f"Unknown language: {reorganize}") + print_info(f"Available languages: {', '.join(LANGUAGE_CONFIGS.keys())}") + sys.exit(1) + + file_path = config_mgr.get_properties_path(reorganize) + if not 
file_path.exists(): + print_error(f"No {env} properties file found for {reorganize}: {file_path}") + sys.exit(1) + + config_mgr.reorganize_existing_file(reorganize) + print_success(f"Reorganized {file_path}") + + # Validate with propscheck + print_info("Validating with propscheck.py...") + valid, message = config_mgr.validate_with_propscheck(reorganize) + if valid: + print_success(message) + else: + print_error(message) + + sys.exit(0) + except Exception as e: + print_error(f"Error reorganizing file: {e}") + sys.exit(1) + + # Skip banner in verify-only mode + if not verify_only: + click.echo(f"{Fore.CYAN}{'='*60}{Style.RESET_ALL}") + click.echo(f"{Fore.CYAN}Compiler Explorer Properties Wizard{Style.RESET_ALL}") + click.echo(f"{Fore.CYAN}{'='*60}{Style.RESET_ALL}\n") + + try: + # Find config directory only if needed + if not verify_only: + if config_dir: + config_path = Path(config_dir) + else: + config_path = find_ce_config_directory() + print_info(f"Using config directory: {config_path}") + print_info(f"Targeting environment: {env}") + config_mgr = ConfigManager(config_path, env, debug=debug) + else: + config_mgr = None + + # Initialize detector + detector = CompilerDetector(debug=debug) + + # Get compiler path if not provided + if not compiler_path: + questions = [ + inquirer.Text( + "compiler_path", + message="Enter the full path to the compiler executable", + validate=lambda _, x: os.path.isfile(x), + ) + ] + answers = inquirer.prompt(questions) + if not answers: + print_error("Cancelled by user") + sys.exit(1) + compiler_path = answers["compiler_path"] + + # Validate compiler path + compiler_path = os.path.abspath(compiler_path) + if not os.path.isfile(compiler_path): + print_error(f"Compiler not found: {compiler_path}") + sys.exit(1) + + if not os.access(compiler_path, os.X_OK): + print_error(f"File is not executable: {compiler_path}") + sys.exit(1) + + # Detect compiler information + print_info("Detecting compiler type and language...") + try: + 
detected_info = detector.detect_from_path(compiler_path) + + if detected_info.compiler_type: + print_success(f"Detected: {detected_info.name} ({LANGUAGE_CONFIGS[detected_info.language].name})") + else: + print_warning("Could not detect compiler type") + + except Exception as e: + print_error(f"Detection failed: {e}") + # Create minimal info + detected_info = CompilerInfo( + id="custom-compiler", + name=os.path.basename(compiler_path), + exe=compiler_path, + language=language or "c++", + ) + + # Override with command-line options + if language: + detected_info.language = language + + # Suggest appropriate group if not already set + if not detected_info.group: + if not verify_only and config_mgr is not None: + # Normal mode - create config manager and suggest group + suggested_group = config_mgr.suggest_appropriate_group(detected_info) + if suggested_group: + detected_info.group = suggested_group + else: + # Verify-only mode - create a temporary config manager just for suggestion + temp_config_mgr = ConfigManager(find_ce_config_directory(), env, debug=debug) + suggested_group = temp_config_mgr.suggest_appropriate_group(detected_info) + if suggested_group: + detected_info.group = suggested_group + + # Initialize flag for forcing custom ID/name + force_custom_id_name = False + + # Check for existing compiler by path early (before prompts) + if not verify_only and config_mgr is not None: + existing_compiler_id = config_mgr.check_existing_compiler_by_path(compiler_path, detected_info.language) + if existing_compiler_id: + file_path = config_mgr.get_properties_path(detected_info.language) + print_warning(f"Compiler already exists in {env} environment!") + print_info(f"Existing compiler ID: {existing_compiler_id}") + print_info(f"Executable path: {compiler_path}") + print_info(f"Properties file: {file_path}") + + # If automated mode (-y), exit immediately + if yes or non_interactive: + print_info("No changes were made.") + sys.exit(0) + + # In interactive mode, ask if user 
wants to continue with different ID/name + if not click.confirm("\nWould you like to add this compiler anyway with a different ID and name?"): + print_info("No changes were made.") + sys.exit(0) + + print_info("You will need to provide a unique compiler ID and custom name.") + # Set flag to force custom ID and name prompts + force_custom_id_name = True + # Suggest the group from the existing duplicate compiler + if config_mgr is not None: + suggested_group = config_mgr.suggest_appropriate_group(detected_info, existing_compiler_id) + if suggested_group and not detected_info.group: + detected_info.group = suggested_group + + # If verify-only mode, display info and exit + if verify_only: + click.echo("\nDetected compiler information:") + click.echo(f" Path: {compiler_path}") + lang_name = ( + LANGUAGE_CONFIGS[detected_info.language].name + if detected_info.language in LANGUAGE_CONFIGS + else detected_info.language + ) + click.echo(f" Language: {lang_name}") + click.echo(f" Compiler Type: {detected_info.compiler_type or 'unknown'}") + click.echo(f" Version: {detected_info.version or 'unknown'}") + click.echo(f" Semver: {detected_info.semver or 'unknown'}") + if detected_info.target: + click.echo(f" Target: {detected_info.target}") + click.echo(f" Cross-compiler: {'Yes' if detected_info.is_cross_compiler else 'No'}") + click.echo(f" Suggested ID: {detected_info.id}") + click.echo(f" Suggested Name: {detected_info.name}") + click.echo(f" Suggested Group: {detected_info.group or 'none'}") + sys.exit(0) + + # Interactive prompts for missing information + if not yes and not non_interactive: + questions = [] + + # Windows SDK path prompt for MSVC compilers if auto-detection failed + if detected_info.needs_sdk_prompt: + print_info("Windows SDK auto-detection failed. 
You can optionally specify the Windows SDK path.") + print_info("Example: Z:/compilers/windows-kits-10 (leave empty to skip)") + sdk_question = inquirer.Text( + "windows_sdk_path", + message="Windows SDK base path (optional)", + default="", + validate=lambda _, x: x == "" or os.path.isdir(x.replace("\\", "/")) + ) + sdk_answers = inquirer.prompt([sdk_question]) + if sdk_answers and sdk_answers["windows_sdk_path"].strip(): + # Apply the user-provided SDK path + detected_info = detector.set_windows_sdk_path(detected_info, sdk_answers["windows_sdk_path"].strip()) + print_success(f"Windows SDK paths added from: {sdk_answers['windows_sdk_path']}") + + # Language selection if needed + if not language and detected_info.language: + lang_choices = [(LANGUAGE_CONFIGS[k].name, k) for k in LANGUAGE_CONFIGS.keys()] + questions.append( + inquirer.List( + "language", message="Programming language", choices=lang_choices, default=detected_info.language + ) + ) + + # Compiler ID - force custom if duplicate exists + if force_custom_id_name: + questions.append( + inquirer.Text( + "compiler_id", + message="Compiler ID (must be unique)", + default=compiler_id or "", + validate=lambda _, x: bool(x and x.strip() and x != detected_info.id), + ) + ) + else: + questions.append( + inquirer.Text( + "compiler_id", + message="Compiler ID", + default=compiler_id or detected_info.id, + validate=lambda _, x: bool(x and x.strip()), + ) + ) + + # Display name - force custom if duplicate exists + if force_custom_id_name: + questions.append( + inquirer.Text( + "display_name", + message="Display name (must be custom)", + default=display_name or "", + validate=lambda _, x: bool(x and x.strip() and x != detected_info.name), + ) + ) + else: + questions.append( + inquirer.Text("display_name", message="Display name", default=display_name or detected_info.name) + ) + + # Compiler type (if not detected) + if not detected_info.compiler_type: + # Get all supported compiler types dynamically + supported_types = 
sorted(get_supported_compiler_types()) + # Add 'other' as fallback option + type_choices = supported_types + ["other"] + + questions.append( + inquirer.List("compiler_type", message="Compiler type", choices=type_choices, default="other") + ) + + # Group + questions.append( + inquirer.Text( + "group", + message="Add to group", + default=group or detected_info.group or detected_info.compiler_type or "", + ) + ) + + # Options + questions.append( + inquirer.Text( + "options", + message="Additional options (space-separated, quote options with spaces)", + default=options or "", + ) + ) + + if questions: + answers = inquirer.prompt(questions) + if not answers: + print_error("Cancelled by user") + sys.exit(1) + + # Update detected info + if "language" in answers: + detected_info.language = answers["language"] + if "compiler_id" in answers: + detected_info.id = answers["compiler_id"] + if "display_name" in answers: + detected_info.name = answers["display_name"] + # If this is a duplicate override scenario, force the name to be included + if force_custom_id_name: + detected_info.force_name = True + if "compiler_type" in answers: + compiler_type = answers["compiler_type"] + # Validate compiler type against supported types + if compiler_type != "other": + supported_types = get_supported_compiler_types() + if compiler_type not in supported_types: + print_warning(f"'{compiler_type}' is not a recognized compiler type in Compiler Explorer") + detected_info.compiler_type = compiler_type + if "group" in answers and answers["group"]: + detected_info.group = answers["group"] + if "options" in answers and answers["options"]: + detected_info.options = format_compiler_options(answers["options"]) + else: + # In automated mode, use command-line values + if compiler_id: + detected_info.id = compiler_id + if display_name: + detected_info.name = display_name + # If this is a duplicate override scenario, force the name to be included + if force_custom_id_name: + detected_info.force_name = True 
+ if group: + detected_info.group = group + if options: + detected_info.options = format_compiler_options(options) + + # Ensure unique ID (config_mgr should not be None at this point) + assert config_mgr is not None, "config_mgr should not be None in non-verify mode" + original_id = detected_info.id + detected_info.id = config_mgr.ensure_compiler_id_unique(detected_info.id, detected_info.language) + if detected_info.id != original_id: + print_warning(f"ID already exists, using: {detected_info.id}") + + # Show configuration preview + print_info("\nConfiguration preview:") + normalized_exe_path = detected_info.exe.replace("\\", "/") + click.echo(f" compiler.{detected_info.id}.exe={normalized_exe_path}") + + # Check if semver will be available (either detected or extracted) + semver_to_use = detected_info.semver + if not semver_to_use: + # Try to extract version like the config manager will do + try: + semver_to_use = config_mgr._extract_compiler_version(detected_info.exe) + except Exception: + pass + + # Show semver if available + if semver_to_use: + click.echo(f" compiler.{detected_info.id}.semver={semver_to_use}") + + # Show name if semver is not available OR if this is a duplicate override scenario + if detected_info.name and (not semver_to_use or force_custom_id_name): + click.echo(f" compiler.{detected_info.id}.name={detected_info.name}") + + if detected_info.compiler_type: + click.echo(f" compiler.{detected_info.id}.compilerType={detected_info.compiler_type}") + if detected_info.options: + click.echo(f" compiler.{detected_info.id}.options={detected_info.options}") + if detected_info.java_home: + click.echo(f" compiler.{detected_info.id}.java_home={detected_info.java_home}") + if detected_info.runtime: + click.echo(f" compiler.{detected_info.id}.runtime={detected_info.runtime}") + if detected_info.execution_wrapper: + click.echo(f" compiler.{detected_info.id}.executionWrapper={detected_info.execution_wrapper}") + if detected_info.include_path: + click.echo(f" 
compiler.{detected_info.id}.includePath={detected_info.include_path}") + if detected_info.lib_path: + click.echo(f" compiler.{detected_info.id}.libPath={detected_info.lib_path}") + if detected_info.group: + click.echo(f" Will add to group: {detected_info.group}") + + # Confirm + file_path = config_mgr.get_properties_path(detected_info.language) + if not yes and not non_interactive: + if not click.confirm(f"\nUpdate {file_path}?"): + print_error("Cancelled by user") + sys.exit(1) + + # Add compiler + config_mgr.add_compiler(detected_info) + print_success("Configuration updated successfully!") + + # Validate with propscheck + print_info("Validating with propscheck.py...") + valid, message = config_mgr.validate_with_propscheck(detected_info.language) + if valid: + print_success(message) + else: + print_error(message) + # Don't exit with error, as the file was written successfully + + # Discovery validation (default for local environment, optional for others) + should_validate_discovery = validate_discovery or (env == "local") + if should_validate_discovery: + print_info("Validating with discovery...") + valid, message, discovered_semver = config_mgr.validate_with_discovery( + detected_info.language, detected_info.id + ) + if valid: + print_success(message) + if discovered_semver: + print_info(f"Discovered semver: {discovered_semver}") + else: + print_error(message) + print_info( + "Note: Discovery validation failed, but the compiler was added to the properties file successfully." 
+ ) + + click.echo(f"\n{Fore.GREEN}Compiler added successfully!{Style.RESET_ALL}") + click.echo("You may need to restart Compiler Explorer for changes to take effect.") + + except KeyboardInterrupt: + print_error("\nCancelled by user") + sys.exit(1) + except Exception as e: + print_error(f"Error: {e}") + sys.exit(1) + + +if __name__ == "__main__": + cli() diff --git a/etc/scripts/ce-properties-wizard/ce_properties_wizard/models.py b/etc/scripts/ce-properties-wizard/ce_properties_wizard/models.py new file mode 100644 index 000000000..2caff4cfa --- /dev/null +++ b/etc/scripts/ce-properties-wizard/ce_properties_wizard/models.py @@ -0,0 +1,54 @@ +"""Data models for the CE Properties Wizard.""" + +import re +from typing import List, Optional + +from pydantic import BaseModel, Field, validator + + +class CompilerInfo(BaseModel): + """Model representing compiler information.""" + + id: str = Field(..., description="Unique identifier for the compiler") + name: str = Field(..., description="Display name for the compiler") + exe: str = Field(..., description="Path to the compiler executable") + compiler_type: Optional[str] = Field(None, description="Type of compiler (gcc, clang, etc)") + version: Optional[str] = Field(None, description="Compiler version") + semver: Optional[str] = Field(None, description="Semantic version") + group: Optional[str] = Field(None, description="Compiler group to add to") + options: Optional[str] = Field(None, description="Default compiler options") + language: str = Field(..., description="Programming language") + target: Optional[str] = Field(None, description="Target platform (for cross-compilers)") + is_cross_compiler: bool = Field(False, description="Whether this is a cross-compiler") + force_name: bool = Field(False, description="Force inclusion of .name property even when semver exists") + java_home: Optional[str] = Field(None, description="JAVA_HOME path for Java-based compilers") + runtime: Optional[str] = Field(None, description="Runtime 
executable path for Java-based compilers")
+    execution_wrapper: Optional[str] = Field(None, description="Execution wrapper path for languages like Dart")
+    include_path: Optional[str] = Field(None, description="Include paths for MSVC compilers")
+    lib_path: Optional[str] = Field(None, description="Library paths for MSVC compilers")
+    needs_sdk_prompt: bool = Field(False, description="Whether to prompt user for Windows SDK path")
+
+    # NOTE(review): pydantic v1-style `@validator`; under pydantic v2 this API is
+    # deprecated in favour of `@field_validator` — confirm which pydantic version is pinned.
+    @validator("id")
+    def validate_id(cls, value):  # noqa: N805
+        """Ensure ID is valid for properties files."""
+        # Properties keys are dot-separated (compiler.<id>.exe), so the ID itself must
+        # contain no dots, whitespace, or other separator characters.
+        if not re.match(r"^[a-zA-Z0-9_-]+$", value):
+            raise ValueError("ID must contain only alphanumeric characters, hyphens, and underscores")
+        return value
+
+
+class LanguageConfig(BaseModel):
+    """Model representing language configuration."""
+
+    name: str = Field(..., description="Language name")
+    properties_file: str = Field(..., description="Properties filename (without path, defaults to local)")
+    compiler_types: List[str] = Field(default_factory=list, description="Known compiler types for this language")
+    extensions: List[str] = Field(default_factory=list, description="File extensions")
+    keywords: List[str] = Field(default_factory=list, description="Keywords in compiler path/name")
+
+    def get_properties_file(self, env: str = "local") -> str:
+        """Get properties file name for specified environment."""
+        if env == "local":
+            return self.properties_file
+        else:
+            # Replace .local. with .{env}. 
+ return self.properties_file.replace(".local.", f".{env}.") diff --git a/etc/scripts/ce-properties-wizard/ce_properties_wizard/surgical_editor.py b/etc/scripts/ce-properties-wizard/ce_properties_wizard/surgical_editor.py new file mode 100644 index 000000000..f064c1c78 --- /dev/null +++ b/etc/scripts/ce-properties-wizard/ce_properties_wizard/surgical_editor.py @@ -0,0 +1,608 @@ +"""Surgical properties file editor that preserves existing structure.""" + +import re +from pathlib import Path +from typing import List, Optional, Tuple + +from .models import CompilerInfo +from .utils import create_backup + + +class PropertiesFileEditor: + """Surgical editor that makes minimal changes to properties files.""" + + def __init__(self, file_path: Path): + self.file_path = file_path + self.lines: List[str] = [] + self.load_file() + + def load_file(self): + """Load file content, preserving all structure.""" + if self.file_path.exists(): + with open(self.file_path, "r", encoding="utf-8") as f: + self.lines = [line.rstrip("\n") for line in f.readlines()] + else: + self.lines = [] + + def save_file(self): + """Save file with minimal changes.""" + # Create backup if file exists + if self.file_path.exists(): + create_backup(self.file_path) + + with open(self.file_path, "w", encoding="utf-8") as f: + for line in self.lines: + f.write(f"{line}\n") + + def find_compilers_line(self) -> Optional[int]: + """Find the compilers= line.""" + for i, line in enumerate(self.lines): + if line.startswith("compilers="): + return i + return None + + def find_group_section(self, group_name: str) -> Tuple[Optional[int], Optional[int]]: + """Find the start and end of a group section. + + Returns: + (start_line, end_line) where start_line is the first group.{name}. line + and end_line is the line before the next group or compiler section starts. + """ + start_line = None + end_line = None + + # Find start of this group + group_prefix = f"group.{group_name}." 
+        for i, line in enumerate(self.lines):
+            if line.startswith(group_prefix):
+                start_line = i
+                break
+
+        if start_line is None:
+            return None, None
+
+        # Find end of this group (before next group or compiler section)
+        for i in range(start_line + 1, len(self.lines)):
+            line = self.lines[i].strip()
+            # NOTE(review): this condition relies on `and` binding tighter than `or`;
+            # the section terminators are: a line from a *different* group, any
+            # compiler.* line, libs=/tools= lines, or a banner comment ("####"/"Installed").
+            if (
+                line.startswith("group.")
+                and not line.startswith(group_prefix)
+                or line.startswith("compiler.")
+                or line.startswith("libs=")
+                or line.startswith("tools=")
+                or line.startswith("#")
+                and ("####" in line or "Installed" in line)
+            ):
+                end_line = i
+                break
+
+        if end_line is None:
+            end_line = len(self.lines)
+
+        return start_line, end_line
+
+    def find_compiler_section(self, compiler_id: str) -> Tuple[Optional[int], Optional[int]]:
+        """Find the start and end of a compiler section."""
+        start_line = None
+        end_line = None
+
+        # Find start of this compiler
+        compiler_prefix = f"compiler.{compiler_id}."
+        for i, line in enumerate(self.lines):
+            if line.startswith(compiler_prefix):
+                start_line = i
+                break
+
+        if start_line is None:
+            return None, None
+
+        # Find end of this compiler (before next compiler, group, or libs/tools section)
+        for i in range(start_line + 1, len(self.lines)):
+            line = self.lines[i].strip()
+            # Same precedence-dependent and/or condition as find_group_section above.
+            if (
+                line.startswith("compiler.")
+                and not line.startswith(compiler_prefix)
+                or line.startswith("group.")
+                or line.startswith("libs=")
+                or line.startswith("tools=")
+                or line.startswith("#")
+                and ("####" in line or "Installed" in line)
+            ):
+                end_line = i
+                break
+
+        if end_line is None:
+            end_line = len(self.lines)
+
+        return start_line, end_line
+
+    def get_existing_groups_from_compilers_line(self) -> List[str]:
+        """Extract group names from the compilers= line."""
+        compilers_line_idx = self.find_compilers_line()
+        if compilers_line_idx is None:
+            return []
+
+        line = self.lines[compilers_line_idx]
+        # Extract groups from compilers=&group1:&group2:... 
+ if "=" in line: + value = line.split("=", 1)[1] + groups = [] + for part in value.split(":"): + part = part.strip() + if part.startswith("&"): + groups.append(part[1:]) # Remove & prefix + return groups + return [] + + def add_group_to_compilers_line(self, group_name: str): + """Add a group to the compilers= line if not already present.""" + existing_groups = self.get_existing_groups_from_compilers_line() + if group_name in existing_groups: + return # Already exists + + # Check if this group is referenced by any existing parent groups + # (e.g., vcpp_x64 might be referenced by group.vcpp.compilers=&vcpp_x86:&vcpp_x64:&vcpp_arm64) + if self._is_group_referenced_elsewhere(group_name): + return # Already referenced by another group + + compilers_line_idx = self.find_compilers_line() + if compilers_line_idx is None: + # No compilers line exists, create one + self.lines.insert(0, f"compilers=&{group_name}") + return + + # Add to existing line + line = self.lines[compilers_line_idx] + if line.endswith("="): + # Empty compilers line, just append without colon + self.lines[compilers_line_idx] = f"{line}&{group_name}" + elif line.endswith(":"): + # Line ends with colon, just append + self.lines[compilers_line_idx] = f"{line}&{group_name}" + else: + # Add with colon separator + self.lines[compilers_line_idx] = f"{line}:&{group_name}" + + def _is_group_referenced_elsewhere(self, group_name: str) -> bool: + """Check if a group is referenced by any other group's compilers list.""" + for line in self.lines: + # Look for group.*.compilers= lines that reference this group + if ".compilers=" in line and not line.startswith(f"group.{group_name}.compilers="): + # Extract the value part after = + if "=" in line: + value = line.split("=", 1)[1] + # Check if this group is referenced (with & prefix) + referenced_groups = [] + for part in value.split(":"): + part = part.strip() + if part.startswith("&"): + referenced_groups.append(part[1:]) # Remove & prefix + + if group_name in 
referenced_groups:
+                        return True
+        return False
+
+    def group_exists(self, group_name: str) -> bool:
+        """Check if a group already exists in the file."""
+        start_line, _ = self.find_group_section(group_name)
+        return start_line is not None
+
+    def compiler_exists(self, compiler_id: str) -> bool:
+        """Check if a compiler already exists in the file."""
+        start_line, _ = self.find_compiler_section(compiler_id)
+        return start_line is not None
+
+    def find_insertion_point_for_group(self, group_name: str) -> int:
+        """Find the best place to insert a new group section."""
+        # Find the end of both existing groups AND all compilers
+        last_group_end = 0
+        last_compiler_end = 0
+
+        # Find end of existing groups
+        # NOTE(review): loop index `i` is unused in both scans; iterating self.lines
+        # directly would read more cleanly.
+        for i, line in enumerate(self.lines):
+            if line.startswith("group."):
+                # Find end of this group
+                group_match = re.match(r"^group\.([^.]+)\.", line)
+                if group_match:
+                    current_group = group_match.group(1)
+                    _, end_line = self.find_group_section(current_group)
+                    if end_line is not None:
+                        last_group_end = max(last_group_end, end_line)
+
+        # Find end of all compilers
+        for i, line in enumerate(self.lines):
+            if line.startswith("compiler."):
+                # Find end of this compiler
+                compiler_match = re.match(r"^compiler\.([^.]+)\.", line)
+                if compiler_match:
+                    current_compiler = compiler_match.group(1)
+                    _, end_line = self.find_compiler_section(current_compiler)
+                    if end_line is not None:
+                        last_compiler_end = max(last_compiler_end, end_line)
+
+        # Insert after whichever comes last: groups or compilers
+        insertion_point = max(last_group_end, last_compiler_end)
+
+        # If neither groups nor compilers found, insert after compilers line
+        if insertion_point == 0:
+            compilers_line_idx = self.find_compilers_line()
+            if compilers_line_idx is not None:
+                # Insert after compilers line and any following blank lines
+                insertion_point = compilers_line_idx + 1
+                while insertion_point < len(self.lines) and self.lines[insertion_point].strip() == "":
+                    insertion_point += 1
+                return 
insertion_point + else: + return 0 + + return insertion_point + + def find_insertion_point_for_compiler(self, compiler_id: str, group_name: Optional[str] = None) -> int: + """Find the best place to insert a new compiler section.""" + # If we have a group, try to insert at the end of that group's compilers + if group_name: + group_start, group_end = self.find_group_section(group_name) + if group_start is not None: + # Look for compilers from this group after the group definition + last_compiler_end = group_end + + # Find compilers that belong to this group + compilers_in_group = self.get_compilers_in_group(group_name) + for comp_id in compilers_in_group: + if comp_id != compiler_id: # Don't include ourselves + _, comp_end = self.find_compiler_section(comp_id) + if comp_end is not None: + last_compiler_end = max(last_compiler_end, comp_end) + + return last_compiler_end + + # Fallback: find the end of all compilers, but insert before libs/tools + last_compiler_end = 0 + libs_tools_start = len(self.lines) # Default to end of file + + # Find where libs/tools sections start + for i, line in enumerate(self.lines): + if ( + line.startswith("libs=") + or line.startswith("tools=") + or (line.startswith("#") and ("####" in line or "Installed" in line)) + ): + libs_tools_start = i + break + + # Find end of all compilers, but only those before libs/tools + for i, line in enumerate(self.lines): + if i >= libs_tools_start: + break + if line.startswith("compiler."): + # Find end of this compiler + compiler_match = re.match(r"^compiler\.([^.]+)\.", line) + if compiler_match: + current_compiler = compiler_match.group(1) + _, end_line = self.find_compiler_section(current_compiler) + if end_line is not None and end_line <= libs_tools_start: + last_compiler_end = max(last_compiler_end, end_line) + + if last_compiler_end == 0: + # No compilers found, insert after groups but before libs/tools + group_insertion = self.find_insertion_point_for_group("dummy") + return min(group_insertion, 
libs_tools_start) + + return min(last_compiler_end, libs_tools_start) + + def get_compilers_in_group(self, group_name: str) -> List[str]: + """Get list of compiler IDs in a group.""" + group_start, group_end = self.find_group_section(group_name) + if group_start is None: + return [] + + # Look for group.{name}.compilers line + compilers_key = f"group.{group_name}.compilers" + for i in range(group_start, group_end): + line = self.lines[i] + if line.startswith(compilers_key + "="): + value = line.split("=", 1)[1] + # Parse compiler list (could be : separated or & prefixed) + compilers = [] + for part in value.split(":"): + part = part.strip() + if part.startswith("&"): + part = part[1:] # Remove & prefix + if part: + compilers.append(part) + return compilers + + return [] + + def add_compiler_to_group(self, group_name: str, compiler_id: str): + """Add a compiler to a group's compilers list.""" + group_start, group_end = self.find_group_section(group_name) + if group_start is None: + return # Group doesn't exist + + # Find the group.{name}.compilers line + compilers_key = f"group.{group_name}.compilers" + for i in range(group_start, group_end): + line = self.lines[i] + if line.startswith(compilers_key + "="): + # Check if compiler is already in the list + existing_compilers = self.get_compilers_in_group(group_name) + if compiler_id in existing_compilers: + return # Already exists + + # Add to the list + if line.endswith("="): + # Empty list + self.lines[i] = f"{line}{compiler_id}" + else: + # Add with colon separator + self.lines[i] = f"{line}:{compiler_id}" + return + + def add_group_property(self, group_name: str, property_name: str, value: str): + """Add a property to a group if it doesn't already exist.""" + group_start, group_end = self.find_group_section(group_name) + if group_start is None: + return # Group doesn't exist + + # Check if property already exists + prop_key = f"group.{group_name}.{property_name}" + for i in range(group_start, group_end): + line = 
self.lines[i] + if line.startswith(prop_key + "="): + return # Already exists + + # Find a good place to insert (after the compilers line if it exists) + insertion_point = group_start + 1 + compilers_key = f"group.{group_name}.compilers" + for i in range(group_start, group_end): + line = self.lines[i] + if line.startswith(compilers_key + "="): + insertion_point = i + 1 + break + + # Insert the new property + self.lines.insert(insertion_point, f"{prop_key}={value}") + + def get_group_property(self, group_name: str, property_name: str) -> Optional[str]: + """Get a property value from a group.""" + group_start, group_end = self.find_group_section(group_name) + if group_start is None: + return None + + # Check if property exists + prop_key = f"group.{group_name}.{property_name}" + for i in range(group_start, group_end): + line = self.lines[i] + if line.startswith(prop_key + "="): + return line.split("=", 1)[1] + + return None + + def add_compiler_property(self, compiler_id: str, property_name: str, value: str): + """Add a property to a compiler if it doesn't already exist.""" + compiler_start, compiler_end = self.find_compiler_section(compiler_id) + if compiler_start is None: + return # Compiler doesn't exist + + # Check if property already exists + prop_key = f"compiler.{compiler_id}.{property_name}" + for i in range(compiler_start, compiler_end): + line = self.lines[i] + if line.startswith(prop_key + "="): + return # Already exists + + # Insert at the end of the compiler section + insertion_point = compiler_end + + # Try to insert in a logical order (exe, name, semver, compilerType, options, etc.) 
+ desired_order = ["exe", "semver", "name", "compilerType", "options"] + if property_name in desired_order: + target_index = desired_order.index(property_name) + + # Find where to insert based on order + for i in range(compiler_start, compiler_end): + line = self.lines[i] + if line.startswith(f"compiler.{compiler_id}."): + existing_prop = line.split(".", 2)[2].split("=")[0] + if existing_prop in desired_order: + existing_index = desired_order.index(existing_prop) + if existing_index > target_index: + insertion_point = i + break + else: + insertion_point = i + 1 + + # Insert the new property + self.lines.insert(insertion_point, f"{prop_key}={value}") + + def create_group_section(self, group_name: str, compilers_list: Optional[List[str]] = None): + """Create a new group section.""" + if self.group_exists(group_name): + return # Already exists + + insertion_point = self.find_insertion_point_for_group(group_name) + + # Ensure proper spacing: blank line after compilers= and before group + compilers_line_idx = self.find_compilers_line() + if compilers_line_idx is not None and insertion_point == compilers_line_idx + 1: + # We're inserting right after compilers= line, add blank line first + self.lines.insert(insertion_point, "") + insertion_point += 1 + elif ( + insertion_point > 0 and insertion_point < len(self.lines) and self.lines[insertion_point - 1].strip() != "" + ): + # Add empty line before group if previous line is not empty + self.lines.insert(insertion_point, "") + insertion_point += 1 + + # Create the group.{name}.compilers line + compilers_value = ":".join(compilers_list) if compilers_list else "" + self.lines.insert(insertion_point, f"group.{group_name}.compilers={compilers_value}") + + def create_compiler_section(self, compiler: CompilerInfo): + """Create a new compiler section.""" + if self.compiler_exists(compiler.id): + return # Already exists + + insertion_point = self.find_insertion_point_for_compiler(compiler.id, compiler.group) + + # Ensure proper 
spacing: blank line after group section and before compiler + if compiler.group: + group_start, group_end = self.find_group_section(compiler.group) + if group_end is not None and insertion_point == group_end: + # We're inserting right after group section, add blank line first + self.lines.insert(insertion_point, "") + insertion_point += 1 + + # Add empty line before compiler if previous line is not empty + if insertion_point > 0 and insertion_point < len(self.lines) and self.lines[insertion_point - 1].strip() != "": + self.lines.insert(insertion_point, "") + insertion_point += 1 + + # Add compiler properties in order + props_to_add = [] + + # Normalize exe path for Windows (convert backslashes to forward slashes) + normalized_exe_path = compiler.exe.replace("\\", "/") + props_to_add.append(f"compiler.{compiler.id}.exe={normalized_exe_path}") + + # Add semver if available, name if no semver or force_name is True + if compiler.semver: + props_to_add.append(f"compiler.{compiler.id}.semver={compiler.semver}") + if compiler.name and (not compiler.semver or compiler.force_name): + props_to_add.append(f"compiler.{compiler.id}.name={compiler.name}") + + # Only add compilerType if the group doesn't already have the same one + if compiler.compiler_type: + group_compiler_type = None + if compiler.group: + group_compiler_type = self.get_group_property(compiler.group, "compilerType") + + # Add compilerType only if group doesn't have it or has a different one + if not group_compiler_type or group_compiler_type != compiler.compiler_type: + props_to_add.append(f"compiler.{compiler.id}.compilerType={compiler.compiler_type}") + + if compiler.options: + props_to_add.append(f"compiler.{compiler.id}.options={compiler.options}") + + # Add Java-related properties for Java-based compilers + if compiler.java_home: + props_to_add.append(f"compiler.{compiler.id}.java_home={compiler.java_home}") + + if compiler.runtime: + 
props_to_add.append(f"compiler.{compiler.id}.runtime={compiler.runtime}") + + # Add execution wrapper for compilers that need it + if compiler.execution_wrapper: + props_to_add.append(f"compiler.{compiler.id}.executionWrapper={compiler.execution_wrapper}") + + # Add MSVC-specific include and library paths + if compiler.include_path: + props_to_add.append(f"compiler.{compiler.id}.includePath={compiler.include_path}") + if compiler.lib_path: + props_to_add.append(f"compiler.{compiler.id}.libPath={compiler.lib_path}") + + # Insert all properties + for prop in props_to_add: + self.lines.insert(insertion_point, prop) + insertion_point += 1 + + def ensure_libs_tools_sections(self): + """Ensure libs= and tools= sections exist at the end if missing.""" + has_libs = any(line.startswith("libs=") for line in self.lines) + has_tools = any(line.startswith("tools=") for line in self.lines) + + if has_libs and has_tools: + # Check if there's proper spacing before libs section + self._ensure_proper_spacing_before_libs_tools() + return # Both exist + + # Find insertion point (end of file, but before any existing libs/tools) + insertion_point = len(self.lines) + for i, line in enumerate(self.lines): + if line.startswith("libs=") or line.startswith("tools="): + insertion_point = i + break + + # Add sections if missing + if not has_libs: + # Add libs section header + self.lines.insert(insertion_point, "") + self.lines.insert(insertion_point + 1, "#################################") + self.lines.insert(insertion_point + 2, "#################################") + self.lines.insert(insertion_point + 3, "# Installed libs") + self.lines.insert(insertion_point + 4, "libs=") + insertion_point += 5 + + if not has_tools: + # Add tools section header + self.lines.insert(insertion_point, "") + self.lines.insert(insertion_point + 1, "#################################") + self.lines.insert(insertion_point + 2, "#################################") + self.lines.insert(insertion_point + 3, "# 
Installed tools") + self.lines.insert(insertion_point + 4, "tools=") + + def _ensure_proper_spacing_before_libs_tools(self): + """Ensure there's proper spacing before libs/tools sections.""" + # Find the start of libs/tools sections + libs_tools_start = None + for i, line in enumerate(self.lines): + if ( + line.startswith("libs=") + or line.startswith("tools=") + or (line.startswith("#") and ("####" in line or "Installed" in line or "Libraries" in line)) + ): + libs_tools_start = i + break + + if libs_tools_start is None: + return # No libs/tools sections found + + # Check if there's an empty line before the libs/tools section + if libs_tools_start > 0 and self.lines[libs_tools_start - 1].strip() != "": + # No empty line before libs/tools, add one + self.lines.insert(libs_tools_start, "") + + def ensure_proper_spacing_after_compiler(self, compiler_id: str): + """Ensure proper spacing after a compiler section before libs/tools.""" + compiler_start, compiler_end = self.find_compiler_section(compiler_id) + if compiler_start is None: + return + + # Find if there are libs/tools sections after this compiler + libs_tools_start = None + for i in range(compiler_end, len(self.lines)): + line = self.lines[i] + if ( + line.startswith("libs=") + or line.startswith("tools=") + or (line.startswith("#") and ("####" in line or "Installed" in line or "Libraries" in line)) + ): + libs_tools_start = i + break + + if libs_tools_start is None: + return # No libs/tools sections after this compiler + + # Check spacing between compiler end and libs/tools start + empty_lines_count = 0 + for i in range(compiler_end, libs_tools_start): + if self.lines[i].strip() == "": + empty_lines_count += 1 + else: + # Non-empty line found, reset count + empty_lines_count = 0 + + # Ensure exactly one empty line before libs/tools + if empty_lines_count == 0: + # No empty lines, add one + self.lines.insert(libs_tools_start, "") + elif empty_lines_count > 1: + # Too many empty lines, remove extras + 
lines_to_remove = empty_lines_count - 1 + for _ in range(lines_to_remove): + for i in range(compiler_end, libs_tools_start): + if i < len(self.lines) and self.lines[i].strip() == "": + self.lines.pop(i) + break diff --git a/etc/scripts/ce-properties-wizard/ce_properties_wizard/utils.py b/etc/scripts/ce-properties-wizard/ce_properties_wizard/utils.py new file mode 100644 index 000000000..2d01209e2 --- /dev/null +++ b/etc/scripts/ce-properties-wizard/ce_properties_wizard/utils.py @@ -0,0 +1,286 @@ +"""Shared utility functions for CE Properties Wizard.""" + +import re +import shutil +import subprocess +from pathlib import Path +from typing import Callable, List, Optional, Tuple + + +def find_ce_root_directory(search_targets: List[Tuple[str, Callable]], max_levels: int = 6) -> Optional[Path]: + """Find CE root directory by looking for specific target paths. + + Args: + search_targets: List of (relative_path, validation_function) tuples + max_levels: Maximum directory levels to traverse upward + + Returns: + Path to CE root directory if found, None otherwise + """ + current_dir = Path(__file__).resolve().parent + + for _ in range(max_levels): + for target_path, validator in search_targets: + target_dir = current_dir / target_path + if target_dir.exists() and validator(target_dir): + return current_dir + current_dir = current_dir.parent + + return None + + +def find_ce_config_directory() -> Path: + """Find the etc/config directory containing CE configuration files.""" + + def validate_config_dir(path: Path) -> bool: + return path.is_dir() and any(path.glob("*.defaults.properties")) + + search_targets = [("etc/config", validate_config_dir)] + ce_root = find_ce_root_directory(search_targets) + + if ce_root: + return ce_root / "etc" / "config" + + # Fallback: check if we're already in the main CE directory + if Path("etc/config").exists() and Path("etc/config").is_dir(): + config_dir = Path("etc/config").resolve() + if any(config_dir.glob("*.defaults.properties")): + return 
config_dir + + raise FileNotFoundError("Could not find etc/config directory with CE configuration files") + + +def find_ce_lib_directory() -> Path: + """Find the lib directory containing CE TypeScript files.""" + + def validate_lib_dir(path: Path) -> bool: + compilers_dir = path / "compilers" + return compilers_dir.exists() and compilers_dir.is_dir() and any(compilers_dir.glob("*.ts")) + + search_targets = [("lib", validate_lib_dir)] + ce_root = find_ce_root_directory(search_targets) + + if ce_root: + return ce_root / "lib" + + # Fallback: assume we're in the main CE directory + lib_dir = Path("lib") + if validate_lib_dir(lib_dir): + return lib_dir.resolve() + + raise FileNotFoundError("Could not find lib directory with TypeScript files") + + +def create_backup(file_path: Path) -> Path: + """Create a backup of the file with .bak extension. + + Args: + file_path: Path to the file to backup + + Returns: + Path to the created backup file + """ + backup_path = file_path.with_suffix(".properties.bak") + if file_path.exists(): + shutil.copy2(file_path, backup_path) + return backup_path + + +class SubprocessRunner: + """Utility class for running subprocess commands with consistent error handling.""" + + @staticmethod + def run_with_timeout( + cmd: List[str], timeout: Optional[int] = 10, capture_output: bool = True, text: bool = True + ) -> Optional[subprocess.CompletedProcess]: + """Run a subprocess command with timeout and error handling. 
+ + Args: + cmd: Command and arguments to execute + timeout: Timeout in seconds (None for no timeout) + capture_output: Whether to capture stdout/stderr + text: Whether to return text output + + Returns: + CompletedProcess result if successful, None if failed + """ + try: + # If timeout is None, run without timeout + if timeout is None: + result = subprocess.run(cmd, capture_output=capture_output, text=text) + else: + result = subprocess.run(cmd, capture_output=capture_output, text=text, timeout=timeout) + return result + except (subprocess.TimeoutExpired, subprocess.SubprocessError): + return None + + +class VersionExtractor: + """Utility class for extracting version information from compiler output.""" + + # Regex patterns for different compiler types + PATTERNS = { + "gcc": [r"gcc.*?(\d+\.\d+\.\d+)", r"g\+\+.*?(\d+\.\d+\.\d+)"], + "clang": [r"clang version (\d+\.\d+\.\d+)"], + "intel": [r"(?:icc|icpc|icx|dpcpp).*?(\d+\.\d+\.\d+)", r"intel.*?compiler.*?(\d+\.\d+)"], + "intel_fortran": [r"(?:ifx|ifort)\s*\([^)]+\)\s*(\d+\.\d+\.\d+)", r"(?:ifx|ifort).*?(\d+\.\d+\.\d+)"], + "msvc": [r"compiler version (\d+\.\d+\.\d+)"], + "nvcc": [r"release (\d+\.\d+)"], + "rust": [r"rustc (\d+\.\d+\.\d+)"], + "go": [r"go\s*version\s+go(\d+\.\d+(?:\.\d+)?)", r"go(\d+\.\d+(?:\.\d+)?)"], + "tinygo": [r"(?:tinygo\s+)?version:?\s+(\d+\.\d+(?:\.\d+)?)"], + "python": [r"python (\d+\.\d+\.\d+)", r"pypy.*?(\d+\.\d+\.\d+)"], + "fpc": [r"Free Pascal Compiler version (\d+\.\d+\.\d+)", r"fpc.*?(\d+\.\d+\.\d+)"], + "z88dk": [r"z88dk.*?-\s*v([^-\s]+(?:-[^-\s]+)*)", r"v(\d+[^-\s]*(?:-[^-\s]*)*)"], + "kotlin": [r"kotlinc.*?(\d+\.\d+\.\d+)", r"kotlin.*?(\d+\.\d+\.\d+)"], + "zig": [r"zig (\d+\.\d+\.\d+)", r"zig.*?(\d+\.\d+\.\d+)"], + "dart": [r"Dart SDK version: (\d+\.\d+\.\d+)", r"dart.*?(\d+\.\d+\.\d+)"], + # Popular compiled languages + "dmd": [r"DMD.*?v(\d+\.\d+\.\d+)", r"dmd.*?(\d+\.\d+\.\d+)"], + "ldc": [r"LDC.*?(\d+\.\d+\.\d+)", r"ldc.*?(\d+\.\d+\.\d+)"], + "gdc": [r"gdc.*?(\d+\.\d+\.\d+)", 
r"GNU D compiler.*?(\d+\.\d+\.\d+)"], + "swiftc": [r"Swift version (\d+\.\d+(?:\.\d+)?)", r"swiftc.*?(\d+\.\d+(?:\.\d+)?)"], + "nim": [r"Nim Compiler Version (\d+\.\d+\.\d+)", r"nim.*?(\d+\.\d+\.\d+)"], + "crystal": [r"Crystal (\d+\.\d+\.\d+)", r"crystal.*?(\d+\.\d+\.\d+)"], + "v": [r"V (\d+\.\d+(?:\.\d+)?)", r"v.*?(\d+\.\d+(?:\.\d+)?)"], + # Functional languages + "ghc": [r"The Glorious Glasgow Haskell Compilation System, version (\d+\.\d+\.\d+)", r"ghc.*?(\d+\.\d+\.\d+)"], + "ocamlc": [r"OCaml version (\d+\.\d+\.\d+)", r"ocaml.*?(\d+\.\d+\.\d+)"], + "ocamlopt": [r"OCaml version (\d+\.\d+\.\d+)", r"ocaml.*?(\d+\.\d+\.\d+)"], + "scalac": [r"Scala compiler version (\d+\.\d+\.\d+)", r"scala.*?(\d+\.\d+\.\d+)"], + # .NET languages + "csharp": [r"Microsoft.*?C# Compiler version (\d+\.\d+\.\d+)", r"dotnet.*?(\d+\.\d+\.\d+)"], + "dotnet": [r"Microsoft.*?\.NET.*?(\d+\.\d+\.\d+)", r"dotnet.*?(\d+\.\d+\.\d+)"], + "fsharp": [r"F# Compiler.*?(\d+\.\d+\.\d+)", r"fsharpc.*?(\d+\.\d+\.\d+)"], + # Scripting/Dynamic languages + "ruby": [r"ruby (\d+\.\d+\.\d+)", r"ruby.*?(\d+\.\d+\.\d+)"], + "julia": [r"julia version (\d+\.\d+\.\d+)", r"julia.*?(\d+\.\d+\.\d+)"], + "elixir": [r"Elixir (\d+\.\d+\.\d+)", r"elixir.*?(\d+\.\d+\.\d+)"], + "erlc": [r"Erlang.*?(\d+(?:\.\d+)*)", r"erlc.*?(\d+(?:\.\d+)*)"], + # Assembly and low-level + "nasm": [r"NASM version (\d+\.\d+(?:\.\d+)?)", r"nasm.*?(\d+\.\d+(?:\.\d+)?)"], + "gas": [r"GNU assembler.*?(\d+\.\d+(?:\.\d+)?)", r"as.*?(\d+\.\d+(?:\.\d+)?)"], + "yasm": [r"yasm (\d+\.\d+\.\d+)", r"yasm.*?(\d+\.\d+\.\d+)"], + # Modern systems languages + "carbon": [r"Carbon.*?(\d+\.\d+(?:\.\d+)?)", r"carbon.*?(\d+\.\d+(?:\.\d+)?)"], + "mojo": [r"mojo (\d+\.\d+(?:\.\d+)?)", r"mojo.*?(\d+\.\d+(?:\.\d+)?)"], + "odin": [r"odin version (\d+\.\d+(?:\.\d+)?)", r"odin.*?(\d+\.\d+(?:\.\d+)?)"], + "gnatmake": [r"GNATMAKE.*?(\d+\.\d+(?:\.\d+)?)", r"gnat.*?(\d+\.\d+(?:\.\d+)?)"], + "gnucobol": [r"gnucobol.*?(\d+\.\d+(?:\.\d+)?)", r"cobol.*?(\d+\.\d+(?:\.\d+)?)"], + } + 
+ @classmethod + def extract_version(cls, compiler_type: str, output: str) -> Optional[str]: + """Extract version string from compiler output. + + Args: + compiler_type: Type of compiler (gcc, clang, etc.) + output: Raw output from compiler version command + + Returns: + Extracted version string if found, None otherwise + """ + patterns = cls.PATTERNS.get(compiler_type, []) + + for pattern in patterns: + match = re.search(pattern, output, re.IGNORECASE) + if match: + return match.group(1) + + return None + + @classmethod + def extract_semver(cls, version: Optional[str]) -> Optional[str]: + """Extract semantic version from version string. + + Args: + version: Version string to parse + + Returns: + Semantic version (major.minor.patch) if found, None otherwise + """ + if not version: + return None + match = re.match(r"(\d+\.\d+(?:\.\d+)?)", version) + if match: + return match.group(1) + return None + + +class ArchitectureMapper: + """Utility class for architecture and instruction set mapping.""" + + # Architecture mapping based on lib/instructionsets.ts + ARCH_MAPPINGS = { + "aarch64": "aarch64", + "arm64": "aarch64", + "arm": "arm32", + "avr": "avr", + "bpf": "ebpf", + "ez80": "ez80", + "kvx": "kvx", + "k1": "kvx", + "loongarch": "loongarch", + "m68k": "m68k", + "mips": "mips", + "mipsel": "mips", + "mips64": "mips", + "mips64el": "mips", + "nanomips": "mips", + "mrisc32": "mrisc32", + "msp430": "msp430", + "powerpc": "powerpc", + "ppc64": "powerpc", + "ppc": "powerpc", + "riscv64": "riscv64", + "rv64": "riscv64", + "riscv32": "riscv32", + "rv32": "riscv32", + "sh": "sh", + "sparc": "sparc", + "sparc64": "sparc", + "s390x": "s390x", + "vax": "vax", + "wasm32": "wasm32", + "wasm64": "wasm64", + "xtensa": "xtensa", + "z180": "z180", + "z80": "z80", + "x86_64": "amd64", + "x86-64": "amd64", + "amd64": "amd64", + "i386": "x86", + "i486": "x86", + "i586": "x86", + "i686": "x86", + } + + @classmethod + def detect_instruction_set(cls, target: Optional[str], exe_path: str) 
-> str: + """Detect instruction set from target platform or executable path. + + Args: + target: Target platform string (e.g., from compiler -v output) + exe_path: Path to the compiler executable + + Returns: + Instruction set name (defaults to "amd64" if not detected) + """ + if not target: + target = "" + + target_lower = target.lower() + exe_lower = exe_path.lower() + + # Check target first + for arch, instruction_set in cls.ARCH_MAPPINGS.items(): + if arch in target_lower: + return instruction_set + + # Check executable path as fallback + for arch, instruction_set in cls.ARCH_MAPPINGS.items(): + if arch in exe_lower: + return instruction_set + + # Default to amd64 if nothing detected + return "amd64" diff --git a/etc/scripts/ce-properties-wizard/poetry.lock b/etc/scripts/ce-properties-wizard/poetry.lock new file mode 100644 index 000000000..7056f9aae --- /dev/null +++ b/etc/scripts/ce-properties-wizard/poetry.lock @@ -0,0 +1,1024 @@ +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" +optional = false +python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", 
"pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + +[[package]] +name = "black" +version = "23.12.1" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, 
+ {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "blessed" +version = "1.21.0" +description = "Easy, practical library for making 
terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." +optional = false +python-versions = ">=2.7" +groups = ["main"] +files = [ + {file = "blessed-1.21.0-py2.py3-none-any.whl", hash = "sha256:f831e847396f5a2eac6c106f4dfadedf46c4f804733574b15fe86d2ed45a9588"}, + {file = "blessed-1.21.0.tar.gz", hash = "sha256:ece8bbc4758ab9176452f4e3a719d70088eb5739798cd5582c9e05f2a28337ec"}, +] + +[package.dependencies] +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +wcwidth = ">=0.1.4" + +[[package]] +name = "click" +version = "8.2.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "editor" +version = "1.6.6" +description = "🖋 Open the default text editor 🖋" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf"}, + {file = "editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8"}, +] + +[package.dependencies] +runs = "*" +xmod = "*" + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version == \"3.10\"" +files = [ + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "importlab" +version = "0.8.1" +description = "A library to calculate python dependency graphs." 
+optional = false +python-versions = ">=3.6.0" +groups = ["dev"] +files = [ + {file = "importlab-0.8.1-py2.py3-none-any.whl", hash = "sha256:124cfa00e8a34fefe8aac1a5e94f56c781b178c9eb61a1d3f60f7e03b77338d3"}, + {file = "importlab-0.8.1.tar.gz", hash = "sha256:b3893853b1f6eb027da509c3b40e6787e95dd66b4b66f1b3613aad77556e1465"}, +] + +[package.dependencies] +networkx = ">=2" + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "inquirer" +version = "3.4.0" +description = "Collection of common interactive command line user interfaces, based on Inquirer.js" +optional = false +python-versions = ">=3.8.1" +groups = ["main"] +files = [ + {file = "inquirer-3.4.0-py3-none-any.whl", hash = "sha256:bb0ec93c833e4ce7b51b98b1644b0a4d2bb39755c39787f6a504e4fee7a11b60"}, + {file = "inquirer-3.4.0.tar.gz", hash = "sha256:8edc99c076386ee2d2204e5e3653c2488244e82cb197b2d498b3c1b5ffb25d0b"}, +] + +[package.dependencies] +blessed = ">=1.19.0" +editor = ">=1.6.0" +readchar = ">=4.2.0" + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jinxed" +version = "1.3.0" +description = "Jinxed Terminal Library" +optional = false +python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5"}, + {file = "jinxed-1.3.0.tar.gz", hash = "sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf"}, +] + +[package.dependencies] +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "libcst" +version = "1.8.2" +description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.13 programs." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "libcst-1.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:67d9720d91f507c87b3e5f070627ad640a00bc6cfdf5635f8c6ee9f2964cf71c"}, + {file = "libcst-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:94b7c032b72566077614a02baab1929739fd0af0cc1d46deaba4408b870faef2"}, + {file = "libcst-1.8.2-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:11ea148902e3e1688afa392087c728ac3a843e54a87d334d1464d2097d3debb7"}, + {file = "libcst-1.8.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:22c9473a2cc53faabcc95a0ac6ca4e52d127017bf34ba9bc0f8e472e44f7b38e"}, + {file = "libcst-1.8.2-cp310-cp310-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b5269b96367e65793a7714608f6d906418eb056d59eaac9bba980486aabddbed"}, + {file = "libcst-1.8.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d20e932ddd9a389da57b060c26e84a24118c96ff6fc5dcc7b784da24e823b694"}, + {file = "libcst-1.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a553d452004e44b841788f6faa7231a02157527ddecc89dbbe5b689b74822226"}, + {file = "libcst-1.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe762c4c390039b79b818cbc725d8663586b25351dc18a2704b0e357d69b924"}, + {file = "libcst-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:5c513e64eff0f7bf2a908e2d987a98653eb33e1062ce2afd3a84af58159a24f9"}, + {file = "libcst-1.8.2-cp310-cp310-win_arm64.whl", hash = "sha256:41613fe08e647213546c7c59a5a1fc5484666e7d4cab6e80260c612acbb20e8c"}, + {file = "libcst-1.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:688a03bac4dfb9afc5078ec01d53c21556381282bdf1a804dd0dbafb5056de2a"}, + {file = "libcst-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c34060ff2991707c710250463ae9f415ebb21653f2f5b013c61c9c376ff9b715"}, + {file = "libcst-1.8.2-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = 
"sha256:f54f5c4176d60e7cd6b0880e18fb3fa8501ae046069151721cab457c7c538a3d"}, + {file = "libcst-1.8.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d11992561de0ad29ec2800230fbdcbef9efaa02805d5c633a73ab3cf2ba51bf1"}, + {file = "libcst-1.8.2-cp311-cp311-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fa3b807c2d2b34397c135d19ad6abb20c47a2ddb7bf65d90455f2040f7797e1e"}, + {file = "libcst-1.8.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b0110140738be1287e3724080a101e7cec6ae708008b7650c9d8a1c1788ec03a"}, + {file = "libcst-1.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a50618f4819a97ef897e055ac7aaf1cad5df84c206f33be35b0759d671574197"}, + {file = "libcst-1.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9bb599c175dc34a4511f0e26d5b5374fbcc91ea338871701a519e95d52f3c28"}, + {file = "libcst-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:96e2363e1f6e44bd7256bbbf3a53140743f821b5133046e6185491e0d9183447"}, + {file = "libcst-1.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:f5391d71bd7e9e6c73dcb3ee8d8c63b09efc14ce6e4dad31568d4838afc9aae0"}, + {file = "libcst-1.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2e8c1dfa854e700fcf6cd79b2796aa37d55697a74646daf5ea47c7c764bac31c"}, + {file = "libcst-1.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b5c57a3c1976c365678eb0730bcb140d40510990cb77df9a91bb5c41d587ba6"}, + {file = "libcst-1.8.2-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:0f23409add2aaebbb6d8e881babab43c2d979f051b8bd8aed5fe779ea180a4e8"}, + {file = "libcst-1.8.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b88e9104c456590ad0ef0e82851d4fc03e9aa9d621fa8fdd4cd0907152a825ae"}, + {file = "libcst-1.8.2-cp312-cp312-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5ba3ea570c8fb6fc44f71aa329edc7c668e2909311913123d0d7ab8c65fc357"}, + {file = "libcst-1.8.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:460fcf3562f078781e1504983cb11909eb27a1d46eaa99e65c4b0fafdc298298"}, + {file = "libcst-1.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c1381ddbd1066d543e05d580c15beacf671e1469a0b2adb6dba58fec311f4eed"}, + {file = "libcst-1.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a70e40ce7600e1b32e293bb9157e9de3b69170e2318ccb219102f1abb826c94a"}, + {file = "libcst-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:3ece08ba778b6eeea74d9c705e9af2d1b4e915e9bc6de67ad173b962e575fcc0"}, + {file = "libcst-1.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:5efd1bf6ee5840d1b0b82ec8e0b9c64f182fa5a7c8aad680fbd918c4fa3826e0"}, + {file = "libcst-1.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08e9dca4ab6f8551794ce7ec146f86def6a82da41750cbed2c07551345fa10d3"}, + {file = "libcst-1.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8310521f2ccb79b5c4345750d475b88afa37bad930ab5554735f85ad5e3add30"}, + {file = "libcst-1.8.2-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:da2d8b008aff72acd5a4a588491abdda1b446f17508e700f26df9be80d8442ae"}, + {file = "libcst-1.8.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:be821d874ce8b26cbadd7277fa251a9b37f6d2326f8b5682b6fc8966b50a3a59"}, + {file = "libcst-1.8.2-cp313-cp313-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f74b0bc7378ad5afcf25ac9d0367b4dbba50f6f6468faa41f5dfddcf8bf9c0f8"}, + {file = "libcst-1.8.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:b68ea4a6018abfea1f68d50f74de7d399172684c264eb09809023e2c8696fc23"}, + {file = "libcst-1.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2e264307ec49b2c72480422abafe80457f90b4e6e693b7ddf8a23d24b5c24001"}, + {file = "libcst-1.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5d5519962ce7c72d81888fb0c09e58e308ba4c376e76bcd853b48151063d6a8"}, + {file = "libcst-1.8.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:b62aa11d6b74ed5545e58ac613d3f63095e5fd0254b3e0d1168fda991b9a6b41"}, + {file = "libcst-1.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9c2bd4ac288a9cdb7ffc3229a9ce8027a66a3fd3f2ab9e13da60f5fbfe91f3b2"}, + {file = "libcst-1.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:08a8c7d9922ca6eed24e2c13a3c552b3c186af8fc78e5d4820b58487d780ec19"}, + {file = "libcst-1.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bba7c2b5063e8ada5a5477f9fa0c01710645426b5a8628ec50d558542a0a292e"}, + {file = "libcst-1.8.2-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:d97c9fe13aacfbefded6861f5200dcb8e837da7391a9bdeb44ccb133705990af"}, + {file = "libcst-1.8.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:d2194ae959630aae4176a4b75bd320b3274c20bef2a5ca6b8d6fc96d3c608edf"}, + {file = "libcst-1.8.2-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0be639f5b2e1999a4b4a82a0f4633969f97336f052d0c131627983589af52f56"}, + {file = "libcst-1.8.2-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6753e50904e05c27915933da41518ecd7a8ca4dd3602112ba44920c6e353a455"}, + {file = "libcst-1.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:706d07106af91c343150be86caeae1ea3851b74aa0730fcbbf8cd089e817f818"}, + {file = "libcst-1.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd4310ea8ddc49cc8872e083737cf806299b17f93159a1f354d59aa08993e876"}, + {file = "libcst-1.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:51bbafdd847529e8a16d1965814ed17831af61452ee31943c414cb23451de926"}, + {file = "libcst-1.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:4f14f5045766646ed9e8826b959c6d07194788babed1e0ba08c94ea4f39517e3"}, + {file = "libcst-1.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f69582e24667715e3860d80d663f1caeb2398110077e23cc0a1e0066a851f5ab"}, + {file = "libcst-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:1ba85f9e6a7f37ef998168aa3fd28d263d7f83016bd306a4508a2394e5e793b4"}, + {file = "libcst-1.8.2-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:43ccaa6c54daa1749cec53710c70d47150965574d4c6d4c4f2e3f87b9bf9f591"}, + {file = "libcst-1.8.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8a81d816c2088d2055112af5ecd82fdfbe8ff277600e94255e2639b07de10234"}, + {file = "libcst-1.8.2-cp39-cp39-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:449f9ff8a5025dcd5c8d4ad28f6c291de5de89e4c044b0bda96b45bef8999b75"}, + {file = "libcst-1.8.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:36d5ab95f39f855521585b0e819dc2d4d1b2a4080bad04c2f3de1e387a5d2233"}, + {file = "libcst-1.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:207575dec2dae722acf6ab39b4b361151c65f8f895fd37edf9d384f5541562e1"}, + {file = "libcst-1.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52a1067cf31d9e9e4be514b253bea6276f1531dd7de6ab0917df8ce5b468a820"}, + {file = "libcst-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:59e8f611c977206eba294c296c2d29a1c1b1b88206cb97cd0d4847c1a3d923e7"}, + {file = "libcst-1.8.2-cp39-cp39-win_arm64.whl", hash = "sha256:ae22376633cfa3db21c4eed2870d1c36b5419289975a41a45f34a085b2d9e6ea"}, + {file = "libcst-1.8.2.tar.gz", hash = "sha256:66e82cedba95a6176194a817be4232c720312f8be6d2c8f3847f3317d95a0c7f"}, +] + +[package.dependencies] +pyyaml = {version = ">=5.2", markers = "python_version < \"3.13\""} +pyyaml-ft = {version = ">=8.0.0", markers = "python_version >= \"3.13\""} + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "networkx" +version = "3.1" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, + {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, +] + +[package.extras] +default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] + +[[package]] +name = "ninja" +version = "1.11.1.4" +description = "Ninja is a small build system with a focus on speed" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ninja-1.11.1.4-py3-none-macosx_10_9_universal2.whl", hash = "sha256:b33923c8da88e8da20b6053e38deb433f53656441614207e01d283ad02c5e8e7"}, + {file = "ninja-1.11.1.4-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cede0af00b58e27b31f2482ba83292a8e9171cdb9acc2c867a3b6e40b3353e43"}, + {file = "ninja-1.11.1.4-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:096487995473320de7f65d622c3f1d16c3ad174797602218ca8c967f51ec38a0"}, + {file = 
"ninja-1.11.1.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3090d4488fadf6047d0d7a1db0c9643a8d391f0d94729554dbb89b5bdc769d7"}, + {file = "ninja-1.11.1.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecce44a00325a93631792974659cf253a815cc6da4ec96f89742925dfc295a0d"}, + {file = "ninja-1.11.1.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c29bb66d2aa46a2409ab369ea804c730faec7652e8c22c1e428cc09216543e5"}, + {file = "ninja-1.11.1.4-py3-none-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:055f386fb550c2c9d6157e45e20a84d29c47968876b9c5794ae2aec46f952306"}, + {file = "ninja-1.11.1.4-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:f6186d7607bb090c3be1e10c8a56b690be238f953616626f5032238c66e56867"}, + {file = "ninja-1.11.1.4-py3-none-musllinux_1_1_i686.whl", hash = "sha256:cf4453679d15babc04ba023d68d091bb613091b67101c88f85d2171c6621c6eb"}, + {file = "ninja-1.11.1.4-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:d4a6f159b08b0ac4aca5ee1572e3e402f969139e71d85d37c0e2872129098749"}, + {file = "ninja-1.11.1.4-py3-none-musllinux_1_1_s390x.whl", hash = "sha256:c3b96bd875f3ef1db782470e9e41d7508905a0986571f219d20ffed238befa15"}, + {file = "ninja-1.11.1.4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:cf554e73f72c04deb04d0cf51f5fdb1903d9c9ca3d2344249c8ce3bd616ebc02"}, + {file = "ninja-1.11.1.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:cfdd09776436a1ff3c4a2558d3fc50a689fb9d7f1bdbc3e6f7b8c2991341ddb3"}, + {file = "ninja-1.11.1.4-py3-none-win32.whl", hash = "sha256:2ab67a41c90bea5ec4b795bab084bc0b3b3bb69d3cd21ca0294fc0fc15a111eb"}, + {file = "ninja-1.11.1.4-py3-none-win_amd64.whl", hash = "sha256:4617b3c12ff64b611a7d93fd9e378275512bb36eff8babff7c83f5116b4f8d66"}, + {file = "ninja-1.11.1.4-py3-none-win_arm64.whl", hash = "sha256:5713cf50c5be50084a8693308a63ecf9e55c3132a78a41ab1363a28b6caaaee1"}, + {file = "ninja-1.11.1.4.tar.gz", hash = 
"sha256:6aa39f6e894e0452e5b297327db00019383ae55d5d9c57c73b04f13bf79d438a"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pycnite" +version = "2024.7.31" +description = "Python bytecode utilities" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pycnite-2024.7.31-py3-none-any.whl", hash = "sha256:9ff9c09d35056435b867e14ebf79626ca94b6017923a0bf9935377fa90d4cbb3"}, + {file = "pycnite-2024.7.31.tar.gz", hash = "sha256:5125f1c95aef4a23b9bec3b32fae76873dcd46324fa68e39c10fa852ecdea340"}, +] + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, 
+] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = 
"pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = 
"pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", 
hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydot" +version = "4.0.1" +description = "Python interface to Graphviz's Dot" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pydot-4.0.1-py3-none-any.whl", hash = "sha256:869c0efadd2708c0be1f916eb669f3d664ca684bc57ffb7ecc08e70d5e93fee6"}, + {file = "pydot-4.0.1.tar.gz", hash = "sha256:c2148f681c4a33e08bf0e26a9e5f8e4099a82e0e2a068098f32ce86577364ad5"}, +] + +[package.dependencies] +pyparsing = ">=3.1.0" + +[package.extras] +dev = ["chardet", "parameterized", "pydot[lint]", "pydot[types]"] +lint = ["ruff"] +release = ["zest.releaser[recommended]"] +tests = ["pydot[dev]", "pytest", "pytest-cov", "pytest-xdist[psutil]", "tox"] +types = ["mypy"] + +[[package]] +name = "pyparsing" +version = "3.2.3" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf"}, + {file = "pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] 
+ +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytype" +version = "2023.12.18" +description = "Python type inferencer" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytype-2023.12.18-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:dd1a76226ab2e1891eee561d606c8542e49287dbf72fe8dcf010459e2867e4ac"}, + {file = "pytype-2023.12.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5df2aaf8331f87d280480da2626eecba8992a04fefd82b060dbde2a55166862e"}, + {file = "pytype-2023.12.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6014cc7e750da1c1c5af5f3428da1f2ee0c6921903a9ea6eaee0a975a4c5f11d"}, + {file = "pytype-2023.12.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ccc14ae3e76794b750d04a25cbe09803a54fe1638b514d539130802d49192d6"}, + {file = "pytype-2023.12.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:835b13b232005fe23f75f5f4958243f5833f028acc78fc7e7c3e45288e2d59a3"}, + {file = "pytype-2023.12.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3f42107fbac9b15458b3c7b94d3b86b3fe99566a5dad72ee86e9be8b2199c457"}, + {file = "pytype-2023.12.18-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8cd1a8f6039abe5a15af8c98d3c6cd896c820ea40d472b21efd701ddd855f7f3"}, + {file = "pytype-2023.12.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f48410a73e14fd863da3b3d3ff5d01716cd6b3ae1ceb23db577d6aed9629aab6"}, + {file = "pytype-2023.12.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf0d02e5385725fff2b177ed945792407139a13c3f34e7218a0cb6c3026c8563"}, + {file = "pytype-2023.12.18-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a5409ff4816e6cfca22c248d1bea4c152dce0c4dc4ec65a527238d6ce0b270e7"}, + {file = "pytype-2023.12.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14a9bacb67eb841c4db3f9f5b200f257b6b556bc91172eada7caa1e07b4b762e"}, + {file = "pytype-2023.12.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c83832f915fcdf991f6463881022fb704af54b9138af469ed47b55292b34b6b"}, + {file = "pytype-2023.12.18.tar.gz", hash = "sha256:a3ffb2dcee71ddf95b4321b8eff066826a02e99336baa4d47851091a6bcc3457"}, +] + +[package.dependencies] +attrs = ">=21.4.0" +importlab = ">=0.8" +jinja2 = ">=3.1.2" +libcst = ">=1.0.1" +networkx = "<3.2" +ninja = ">=1.10.0.post2" +pycnite = ">=2023.10.11" +pydot = ">=1.4.2" +tabulate = ">=0.8.10" +toml = ">=0.10.2" +typing-extensions = ">=4.3.0" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.13\"" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file 
= "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = 
"sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "pyyaml-ft" +version = "8.0.0" +description = "YAML parser and emitter for Python with support for free-threading" +optional = false +python-versions = ">=3.13" +groups = ["dev"] +markers = "python_version >= \"3.13\"" +files = [ + {file = "pyyaml_ft-8.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:8c1306282bc958bfda31237f900eb52c9bedf9b93a11f82e1aab004c9a5657a6"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30c5f1751625786c19de751e3130fc345ebcba6a86f6bddd6e1285342f4bbb69"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fa992481155ddda2e303fcc74c79c05eddcdbc907b888d3d9ce3ff3e2adcfb0"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cec6c92b4207004b62dfad1f0be321c9f04725e0f271c16247d8b39c3bf3ea42"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06237267dbcab70d4c0e9436d8f719f04a51123f0ca2694c00dd4b68c338e40b"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8a7f332bc565817644cdb38ffe4739e44c3e18c55793f75dddb87630f03fc254"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d10175a746be65f6feb86224df5d6bc5c049ebf52b89a88cf1cd78af5a367a8"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:58e1015098cf8d8aec82f360789c16283b88ca670fe4275ef6c48c5e30b22a96"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5f3e2ceb790d50602b2fd4ec37abbd760a8c778e46354df647e7c5a4ebb"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8d445bf6ea16bb93c37b42fdacfb2f94c8e92a79ba9e12768c96ecde867046d1"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c56bb46b4fda34cbb92a9446a841da3982cdde6ea13de3fbd80db7eeeab8b49"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab0abb46eb1780da486f022dce034b952c8ae40753627b27a626d803926483b"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd48d639cab5ca50ad957b6dd632c7dd3ac02a1abe0e8196a3c24a52f5db3f7a"}, + {file = 
"pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:052561b89d5b2a8e1289f326d060e794c21fa068aa11255fe71d65baf18a632e"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3bb4b927929b0cb162fb1605392a321e3333e48ce616cdcfa04a839271373255"}, + {file = "pyyaml_ft-8.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:de04cfe9439565e32f178106c51dd6ca61afaa2907d143835d501d84703d3793"}, + {file = "pyyaml_ft-8.0.0.tar.gz", hash = "sha256:0c947dce03954c7b5d38869ed4878b2e6ff1d44b08a0d84dc83fdad205ae39ab"}, +] + +[[package]] +name = "readchar" +version = "4.2.1" +description = "Library to easily read single chars and key strokes" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77"}, + {file = "readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb"}, +] + +[[package]] +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = 
"ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, +] + +[[package]] +name = "runs" +version = "1.2.2" +description = "🏃 Run a block of text as a subprocess 🏃" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd"}, + {file = "runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1"}, +] + +[package.dependencies] +xmod = "*" + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = 
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version == \"3.10\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "typing-extensions" +version = "4.14.1" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, + {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = 
"sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "xmod" +version = "1.8.1" +description = "🌱 Turn any object into a module 🌱" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48"}, + {file = "xmod-1.8.1.tar.gz", hash = "sha256:38c76486b9d672c546d57d8035df0beb7f4a9b088bc3fb2de5431ae821444377"}, +] + +[metadata] +lock-version = "2.1" +python-versions = "^3.10" +content-hash = "be49abab8bd72fcb05f2cba95c8fbf43f72857741d28edd07eedeb4c8e9db69a" diff --git a/etc/scripts/ce-properties-wizard/pyproject.toml b/etc/scripts/ce-properties-wizard/pyproject.toml new file mode 100644 index 000000000..5fce7f9d0 --- /dev/null +++ b/etc/scripts/ce-properties-wizard/pyproject.toml @@ -0,0 +1,39 @@ +[tool.poetry] +name = "ce-properties-wizard" +version = "0.1.0" +description = "Interactive wizard for adding compilers to Compiler Explorer" +authors = ["Compiler Explorer Team"] +readme = "README.md" +packages = [{include = "ce_properties_wizard"}] + +[tool.poetry.dependencies] +python = "^3.10" +click = "^8.1.7" +inquirer = "^3.1.3" +pydantic = "^2.5.0" +colorama = "^0.4.6" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.4.3" +black = "^23.11.0" +ruff = "^0.1.6" +pytype = "^2023.11.21" + +[tool.poetry.scripts] +ce-props-wizard = "ce_properties_wizard.main:cli" + +[build-system] 
+requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 120 +target-version = ['py310'] + +[tool.ruff] +line-length = 120 +select = ["E", "F", "I", "N", "W"] + +[tool.pytype] +inputs = ['ce_properties_wizard'] +python_version = '3.10' \ No newline at end of file diff --git a/etc/scripts/ce-properties-wizard/run.ps1 b/etc/scripts/ce-properties-wizard/run.ps1 new file mode 100644 index 000000000..8efe4c67e --- /dev/null +++ b/etc/scripts/ce-properties-wizard/run.ps1 @@ -0,0 +1,100 @@ +# CE Properties Wizard runner script for Windows PowerShell + +param( + [Parameter(ValueFromRemainingArguments=$true)] + [string[]]$Arguments +) + +# Get the directory where this script is located +$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path +Set-Location $ScriptDir + +# Check if poetry is installed +if (-not (Get-Command poetry -ErrorAction SilentlyContinue)) { + Write-Host "Poetry is not installed. Installing Poetry..." -ForegroundColor Yellow + + # Check if Python is available + $pythonCmd = $null + foreach ($cmd in @("python", "python3", "py")) { + if (Get-Command $cmd -ErrorAction SilentlyContinue) { + $pythonCmd = $cmd + break + } + } + + if (-not $pythonCmd) { + Write-Host "Python is not installed. Please install Python first." -ForegroundColor Red + exit 1 + } + + try { + # Download and install Poetry + Write-Host "Downloading Poetry installer..." -ForegroundColor Green + $poetryInstaller = Invoke-RestMethod -Uri https://install.python-poetry.org + $poetryInstaller | & $pythonCmd - + + # Update PATH for current session + $env:Path = "$env:APPDATA\Python\Scripts;$env:Path" + + # Verify installation + if (-not (Get-Command poetry -ErrorAction SilentlyContinue)) { + Write-Host "Poetry installation failed. Please install manually from https://python-poetry.org/docs/#installation" -ForegroundColor Red + exit 1 + } + + Write-Host "Poetry installed successfully!" 
-ForegroundColor Green + } catch { + Write-Host "Failed to install Poetry automatically: $_" -ForegroundColor Red + Write-Host "Please install manually from https://python-poetry.org/docs/#installation" -ForegroundColor Yellow + exit 1 + } +} + +# Install dependencies if needed +if (-not (Test-Path ".venv")) { + Write-Host "Setting up virtual environment..." -ForegroundColor Green + # On Windows, use --only main to skip dev dependencies and avoid pytype build issues + poetry install --only main + Write-Host "Note: Development dependencies skipped on Windows (pytype doesn't build on Windows)" -ForegroundColor Yellow +} + +# Check if we're running under Git Bash (which can cause issues with Poetry) +$isGitBash = $false +if ($env:SHELL -match "bash" -or $env:MSYSTEM) { + $isGitBash = $true + Write-Host "Warning: Git Bash detected. This may cause issues with Poetry." -ForegroundColor Yellow + + # Find the virtual environment + $venvPython = Join-Path $ScriptDir ".venv\Scripts\python.exe" + if (-not (Test-Path $venvPython)) { + # Check Poetry's cache location + $poetryVenvs = "$env:LOCALAPPDATA\pypoetry\Cache\virtualenvs" + $venvDir = Get-ChildItem $poetryVenvs -Directory -ErrorAction SilentlyContinue | Where-Object { $_.Name -like "ce-properties-wizard*" } | Select-Object -First 1 + if ($venvDir) { + $venvPython = Join-Path $venvDir.FullName "Scripts\python.exe" + } + } + + if (Test-Path $venvPython) { + Write-Host "Using Python at: $venvPython" -ForegroundColor Green + # Set UTF-8 encoding for Python to handle Unicode characters + $env:PYTHONIOENCODING = "utf-8" + if ($Arguments) { + & $venvPython -m ce_properties_wizard.main @Arguments + } else { + & $venvPython -m ce_properties_wizard.main + } + } else { + Write-Host "Could not find Python executable in virtual environment" -ForegroundColor Red + Write-Host "This might be due to Git Bash compatibility issues with Poetry on Windows" -ForegroundColor Yellow + Write-Host "Please run this script in a native PowerShell 
window instead" -ForegroundColor Yellow + exit 1 + } +} else { + # Run the wizard with all arguments passed through + if ($Arguments) { + poetry run ce-props-wizard @Arguments + } else { + poetry run ce-props-wizard + } +} diff --git a/etc/scripts/ce-properties-wizard/run.sh b/etc/scripts/ce-properties-wizard/run.sh new file mode 100755 index 000000000..cb563a4ed --- /dev/null +++ b/etc/scripts/ce-properties-wizard/run.sh @@ -0,0 +1,78 @@ +#!/bin/bash +# CE Properties Wizard runner script + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +# Check if poetry is installed +if ! command -v poetry &> /dev/null; then + echo "Poetry is not installed. Installing Poetry..." + + # Check if Python is available + PYTHON_CMD="" + for cmd in python3 python py; do + if command -v $cmd &> /dev/null; then + PYTHON_CMD=$cmd + break + fi + done + + if [ -z "$PYTHON_CMD" ]; then + echo "Python is not installed. Please install Python first." + exit 1 + fi + + # Install Poetry + echo "Downloading and installing Poetry..." + if curl -sSL https://install.python-poetry.org | $PYTHON_CMD -; then + # Add Poetry to PATH for current session + export PATH="$HOME/.local/bin:$PATH" + + # Verify installation + if ! command -v poetry &> /dev/null; then + echo "Poetry installation failed. Please install manually from https://python-poetry.org/docs/#installation" + exit 1 + fi + + echo "Poetry installed successfully!" + else + echo "Failed to install Poetry automatically." + echo "Please install manually from https://python-poetry.org/docs/#installation" + exit 1 + fi +fi + +# Install dependencies if needed +if [ ! -d ".venv" ]; then + echo "Setting up virtual environment..." + poetry install +fi + +# Check for --format parameter +if [[ "$1" == "--format" ]]; then + shift + + if [[ "$1" == "--check" ]]; then + echo "Checking code formatting..." + poetry run black --check --diff . + echo "Checking code with ruff..." + poetry run ruff check . 
+ echo "Running pytype..." + poetry run pytype . + echo "All formatting checks passed!" + else + echo "Formatting code with black..." + poetry run black . + echo "Formatting code with ruff..." + poetry run ruff check --fix . + echo "Running pytype..." + poetry run pytype . + echo "Code formatting complete!" + fi + exit 0 +fi + +# Run the wizard with all arguments passed through +poetry run ce-props-wizard "$@" \ No newline at end of file diff --git a/etc/scripts/util/propscheck.py b/etc/scripts/util/propscheck.py index 29b466fc8..aeb551941 100644 --- a/etc/scripts/util/propscheck.py +++ b/etc/scripts/util/propscheck.py @@ -32,6 +32,7 @@ from argparse import Namespace parser = argparse.ArgumentParser(description='Checks for incorrect/suspicious properties.') parser.add_argument ('--check-suspicious-in-default-prop', required=False, action="store_true") parser.add_argument ('--config-dir', required=False, default="./etc/config") +parser.add_argument ('--check-local', required=False, action="store_true") PROP_RE = re.compile(r'([^# ]*)=(.*)#*') @@ -108,6 +109,7 @@ def process_file(file: str, args: Namespace): listed_groups = set() seen_groups = set() + no_compilers_list = set() listed_compilers = set() seen_compilers_exe = set() seen_compilers_id = set() @@ -134,7 +136,7 @@ def process_file(file: str, args: Namespace): duplicated_compiler_references = set() duplicated_group_references = set() - suspicious_check = args.check_suspicious_in_default_prop or not (file.endswith('.defaults.properties')) + suspicious_check = args.check_suspicious_in_default_prop or (not file.endswith('.defaults.properties') and not file.endswith('.local.properties')) suspicious_path = set() seen_typo_compilers = set() @@ -235,7 +237,19 @@ def process_file(file: str, args: Namespace): bad_tools_exe = listed_tools.symmetric_difference(seen_tools_exe) bad_tools_id = listed_tools.symmetric_difference(seen_tools_id) bad_default = default_compiler - listed_compilers + + if len(listed_compilers) == 
0 and len(listed_groups) == 0: + allowed = ('execution.','compiler-explorer.', 'aws.', 'asm-docs.', 'builtin.', '.defaults.') + is_allowed_to_be_empty = False + for allow in allowed: + if allow in file: + is_allowed_to_be_empty = True + break + if not is_allowed_to_be_empty: + no_compilers_list.add(file) + return { + "no_compilers_list": no_compilers_list, "not_a_valid_prop": not_a_valid_prop, "bad_compilers_exe": bad_compilers_exe - disabled, "bad_compilers_id": bad_compilers_ids - disabled, @@ -256,11 +270,17 @@ def process_file(file: str, args: Namespace): } def process_folder(folder: str, args): - return [(f, process_file(join(folder, f), args)) - for f in listdir(folder) - if isfile(join(folder, f)) - and not f.endswith('.local.properties') - and f.endswith('.properties')] + if not args.check_local: + return [(f, process_file(join(folder, f), args)) + for f in listdir(folder) + if isfile(join(folder, f)) + and not f.endswith('.local.properties') + and f.endswith('.properties')] + else: + return [(f, process_file(join(folder, f), args)) + for f in listdir(folder) + if isfile(join(folder, f)) + and f.endswith('.properties')] def problems_found(file_result): return any(len(file_result[r]) > 0 for r in file_result if r != "filename")