#!/usr/bin/env python3
############################################
# shuriken - A ninja-based C++ build tool
# ------------------------------------------
# Version: 0.1.0
# Author: binaryDiv
# License: MIT License
# https://git.0xbd.space/binaryDiv/shuriken
############################################

import argparse
import copy
import json
import shlex
import subprocess
import sys
from collections import namedtuple
from dataclasses import asdict, dataclass, field
from pathlib import Path
from typing import Any

import yaml

class ConfigValidationException(Exception):
    pass

@dataclass(kw_only=True)
class TargetConfig:
    cpp_compiler: str | None = None
    cpp_standard: str | None = None
    cpp_flags: str | None = None
    cpp_flags_extra: str | None = None

    linker_flags: str | None = None
    linker_flags_extra: str | None = None
    linker_args: str | None = None
    linker_args_extra: str | None = None

    output_file: str | None = None
    run_command: str | None = None

    def update(self, data: dict) -> None:
        for key, value in data.items():
            if hasattr(self, key) and value is not None:
                setattr(self, key, str(value))

    @property
    def merged_cpp_flags(self) -> str:
        return f"{self.cpp_flags or ''} {self.cpp_flags_extra or ''}".strip()

    @property
    def merged_linker_flags(self) -> str:
        return f"{self.linker_flags or ''} {self.linker_flags_extra or ''}".strip()

    @property
    def merged_linker_args(self) -> str:
        return f"{self.linker_args or ''} {self.linker_args_extra or ''}".strip()

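# The merged_* properties of TargetConfig above combine a base value with its *_extra
# counterpart, e.g. cpp_flags='-Wall' and cpp_flags_extra='-O2' yield
# merged_cpp_flags == '-Wall -O2' (example values are illustrative).
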
@dataclass(kw_only=True)
class ShurikenConfig:
    __path_fields = ['source_dir', 'build_dir']
    __list_fields = []

    source_dir: Path = Path('src')
    build_dir: Path = Path('build')

    defaults: TargetConfig = field(default_factory=lambda: TargetConfig(
        cpp_compiler='clang++',
        cpp_standard='c++20',
        cpp_flags='',
        linker_flags='',
        linker_args='',
    ))

    default_target: str = ''
    targets: dict[str, TargetConfig] = field(default_factory=dict)

    def update(self, data: dict) -> None:
        for key, value in data.items():
            # Ignore unknown config keys
            if not hasattr(self, key):
                continue

            if key == 'defaults':
                assert type(value) is dict
                self.defaults.update(value)
            elif key == 'targets':
                assert type(value) is dict
                for target_name, target_config in value.items():
                    assert type(target_name) is str and len(target_name) > 0
                    assert type(target_config) is dict or target_config is None

                    # Ignore "hidden" targets starting with a dot (can be used for YAML anchors)
                    if target_name[0] == '.':
                        continue

                    if target_name not in self.targets:
                        self.targets[target_name] = TargetConfig()
                    if target_config is not None:
                        self.targets[target_name].update(target_config)
            elif key in self.__path_fields:
                setattr(self, key, Path(value))
            elif key in self.__list_fields:
                assert type(value) is list
                setattr(self, key, list(str(item) for item in value))
            else:
                setattr(self, key, str(value))

    def update_from_yaml(self, file_path: Path) -> None:
        with file_path.open('r') as file:
            self.update(yaml.safe_load(file))

    def validate(self) -> None:
        # Validate root elements
        if not self.default_target:
            raise ConfigValidationException('default_target must be set')
        if not self.source_dir or not self.source_dir.is_dir():
            raise ConfigValidationException('source_dir must be set to an existing directory')
        if not self.build_dir:
            # build_dir will be automatically created if it doesn't exist
            raise ConfigValidationException('build_dir must be set')

        # Validate that targets and default target are defined
        if not self.targets:
            raise ConfigValidationException('At least one target must be defined')
        if self.default_target not in self.targets:
            raise ConfigValidationException('default_target is not defined in targets')

        # Validate individual build targets
        for target_name, target_config in self.targets.items():
            if not target_config.cpp_compiler and not self.defaults.cpp_compiler:
                raise ConfigValidationException(f'Target "{target_name}": cpp_compiler must be set')
            if not target_config.cpp_standard and not self.defaults.cpp_standard:
                raise ConfigValidationException(f'Target "{target_name}": cpp_standard must be set')
            if not target_config.output_file and not self.defaults.output_file:
                raise ConfigValidationException(f'Target "{target_name}": output_file must be set')

    def get_ninja_file_path(self, target: str) -> Path:
        return self.build_dir / f'build.{target}.ninja'

    def get_merged_target_config(self, target: str) -> TargetConfig:
        assert target in self.targets, f'Target "{target}" not defined!'

        merged_config = copy.deepcopy(self.defaults)
        merged_config.update(asdict(self.targets[target]))
        return merged_config

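# Illustrative sketch of a shuriken.yaml that maps onto TargetConfig/ShurikenConfig above.
# All values (flags, file names, target names) are hypothetical and only show the expected
# shape: unknown keys are ignored, and target names starting with '.' are skipped (handy
# for YAML anchors).
#
#   source_dir: src
#   build_dir: build
#   default_target: debug
#   defaults:
#     cpp_standard: c++20
#     output_file: example-app
#     run_command: './{out} {args}'
#   targets:
#     debug:
#       cpp_flags_extra: -g -O0
#     release:
#       cpp_flags_extra: -O2
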
class ShurikenArgumentParser:
    argument_parser: argparse.ArgumentParser

    default_config_path: Path = Path('shuriken.yaml')
    default_config_override_path: Path = Path('shuriken.override.yaml')

    def __init__(self):
        self.argument_parser = argparse.ArgumentParser()
        self.argument_parser.add_argument(
            '-c', '--config',
            type=Path,
            help=f'Config file (default: {self.default_config_path})',
            metavar='FILE',
        )
        self.argument_parser.add_argument(
            '-o', '--config-override',
            type=Path,
            help=f'Config file to override config values (default: {self.default_config_override_path})',
            metavar='FILE',
        )
        self.argument_parser.add_argument(
            '-t', '--target',
            help='Build target to use (default: see "default_target" in config)',
        )

        # Define subcommands
        subparsers = self.argument_parser.add_subparsers(dest='command', title='Subcommands')

        subparser_generate = subparsers.add_parser('generate', help='Generate Ninja build files')
        subparser_generate.add_argument(
            '--stdout',
            action='store_true',
            dest='generate_stdout',
            help='Print generated Ninja file to stdout instead of writing to a file',
        )

        subparser_build = subparsers.add_parser('build', help='Build project (default command)')
        subparser_compdb = subparsers.add_parser('compdb', help='Generate compilation database')

        subparser_run = subparsers.add_parser('run', help='Build and run project')
        subparser_run.add_argument(
            'run_args',
            nargs='*',
            help='Arguments that are passed to the application',
        )

        subparser_clean = subparsers.add_parser('clean', help='Remove all build files of the current target')
        subparser_clean.add_argument(
            '--all',
            action='store_true',
            dest='clean_all',
            help='Remove all generated files from all targets',
        )

        subparser_dump_config = subparsers.add_parser(
            'dump-config',
            help='Dumps the parsed config as well as the effective build target config (for debugging)',
        )
        subparser_dyndep = subparsers.add_parser(
            'p1689-to-dyndeps',
            help='(Internal command) Parse p1689 file and generate Ninja dyndeps',
        )

    def parse_args(self, args: list[str] | None = None) -> argparse.Namespace:
        parsed_args = self.argument_parser.parse_args(args)
        parsed_args.config = self.ensure_valid_path_or_default(
            parsed_args.config,
            self.default_config_path,
            required=True,
        )
        parsed_args.config_override = self.ensure_valid_path_or_default(
            parsed_args.config_override,
            self.default_config_override_path,
        )
        return parsed_args

    def error(self, message: str) -> None:
        self.argument_parser.error(message)

    def ensure_valid_path_or_default(self, path: Path | None, default: Path, *, required: bool = False) -> Path | None:
        if path is None:
            if required or default.exists():
                path = default
        if path is not None and not path.exists():
            self.error(f'File not found: {path}')
        return path

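# Illustrative invocations accepted by the parser above (the script name and target names
# are hypothetical; the commands and options come from the add_parser/add_argument calls):
#
#   ./shuriken.py build
#   ./shuriken.py -t release run arg1 arg2
#   ./shuriken.py generate --stdout
#   ./shuriken.py clean --all
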
class ShurikenCli:
    args: argparse.Namespace
    config: ShurikenConfig

    def __init__(self, input_args: list[str] | None = None):
        # Parse command line arguments
        argument_parser = ShurikenArgumentParser()
        self.args = argument_parser.parse_args(input_args)

        # Parse config files
        self.config = ShurikenConfig()
        if self.args.config:
            self.config.update_from_yaml(self.args.config)
        if self.args.config_override:
            self.config.update_from_yaml(self.args.config_override)

        # Validate parsed config
        try:
            self.config.validate()
        except ConfigValidationException as exc:
            self.log_error(f'Config validation error: {exc}')
            exit(1)

        # Check that --target is a valid target
        if self.args.target is not None and self.args.target not in self.config.targets:
            argument_parser.error(f'Target "{self.args.target}" not defined in config')

    def get_selected_target(self) -> str:
        return self.args.target if self.args.target is not None else self.config.default_target

    @staticmethod
    def log_info(message: str) -> None:
        print(f'shuriken: {message}')

    @staticmethod
    def log_error(message: str) -> None:
        print(f'shuriken: Error: {message}')

    def run(self) -> None:
        # Get build target (specified with --target or default_target from config)
        selected_target = self.get_selected_target()

        # Default command: build
        command = self.args.command or 'build'

        match command:
            case 'generate':
                self.generate_ninja_file(selected_target, write_to_stdout=self.args.generate_stdout)

            case 'compdb':
                # Always generate compilation database for default target
                # (avoids issues with clang-scan-deps in different environments like emscripten)
                self.generate_compdb(self.config.default_target)

            case 'build':
                self.build_project(selected_target)

            case 'run':
                self.run_project(selected_target)

            case 'clean':
                if self.args.clean_all:
                    self.log_info('Cleaning up *all* build targets')
                    for target in self.config.targets:
                        self.clean_project(target)
                else:
                    self.clean_project(selected_target)

            case 'dump-config':
                self.dump_config()

            case 'p1689-to-dyndeps':
                self.generate_dyndeps_from_p1689(selected_target)

            case _:
                raise Exception(f'Unknown subcommand "{self.args.command}"')

    def call_ninja(self, target: str, *args: str) -> None:
        run_result = subprocess.run([
            'ninja',
            '-f',
            self.config.get_ninja_file_path(target),
            '-v',
            *args,
        ])

        if run_result.returncode > 0:
            self.log_error(f'Ninja exited with return code {run_result.returncode}')
            exit(1)

    def generate_ninja_file(self, target: str, *, write_to_stdout: bool = False) -> None:
        generator = NinjaFileGenerator(config=self.config, target=target)
        ninja_file_content = generator.generate_file()

        if write_to_stdout:
            print(ninja_file_content)
            return

        # Create build directory for target if it doesn't exist yet
        self.config.build_dir.joinpath(target).mkdir(parents=True, exist_ok=True)

        ninja_file_path = self.config.get_ninja_file_path(target)
        ninja_file_content += '\n'

        # Check if file content has changed before writing it (to preserve the modified timestamp if unmodified)
        if ninja_file_path.exists() and ninja_file_path.read_text() == ninja_file_content:
            self.log_info(f'Ninja file for target {target} is up to date')
        else:
            # Write Ninja file
            self.log_info(f'Generating Ninja file for target {target}')
            with ninja_file_path.open('w') as ninja_file:
                ninja_file.write(ninja_file_content)

        # Create symlink build.ninja to default target ninja file for convenience
        if target == self.config.default_target:
            self.symlink_build_ninja_file(ninja_file_path)

    def symlink_build_ninja_file(self, symlink_target: Path) -> None:
        symlink_path = Path('build.ninja')

        if symlink_path.exists() and not symlink_path.is_symlink():
            self.log_error('Cannot symlink build.ninja: File exists and is not a symlink')
            return

        if symlink_path.is_symlink() and symlink_path.resolve() == symlink_target.absolute():
            # Symlink already correct
            return

        # Remove existing symlink if it exists and create a new symlink
        self.log_info(f'Updating build.ninja symlink to {symlink_target}')
        symlink_path.unlink(missing_ok=True)
        symlink_path.symlink_to(symlink_target)

    def generate_compdb(self, target: str) -> None:
        # Regenerate Ninja file if necessary
        self.generate_ninja_file(target)

        self.log_info('Generating compilation database')
        self.call_ninja(target, 'compdb')

    def build_project(self, target: str) -> None:
        # Regenerate Ninja file if necessary
        self.generate_ninja_file(target)

        self.log_info(f'Building project for target {target}')
        self.call_ninja(target)

    def run_project(self, target: str) -> None:
        # Build project using Ninja if necessary
        self.build_project(target)

        merged_target_config = self.config.get_merged_target_config(target)
        output_file_path = self.config.build_dir / target / merged_target_config.output_file

        # Get run command from config or default to running the output file
        run_command = merged_target_config.run_command or './{out} {args}'

        # Replace placeholders in run command
        run_command = run_command.format(
            out=output_file_path,
            args=shlex.join(self.args.run_args),
        )

        try:
            # Run command
            self.log_info(f'Running command for target {target}:')
            self.log_info(f' {run_command}')
            run_result = subprocess.run(run_command, shell=True)

            if run_result.returncode > 0:
                self.log_error(f'Run command exited with return code {run_result.returncode}')
                exit(1)
        except KeyboardInterrupt:
            self.log_info('Keyboard interrupt')

    def clean_project(self, target: str) -> None:
        ninja_file_path = self.config.get_ninja_file_path(target)

        if not ninja_file_path.exists():
            self.log_info(f'Ninja file {ninja_file_path} not found, nothing to clean up')
            return

        self.log_info(f'Cleaning up build files for target {target}')
        self.call_ninja(target, '-t', 'clean')

        self.log_info(f'Removing Ninja file {ninja_file_path}')
        ninja_file_path.unlink(missing_ok=True)

        # Remove build.ninja symlink if it points to the same Ninja file
        ninja_symlink = Path('build.ninja')
        if ninja_symlink.is_symlink() and ninja_symlink.resolve() == ninja_file_path.absolute():
            self.log_info('Removing build.ninja symlink')
            ninja_symlink.unlink()

    def dump_config(self) -> None:
        target = self.get_selected_target()

        self.log_info('Dumping fully parsed config:')
        print(json.dumps(asdict(self.config), indent=4, default=str))

        self.log_info(f'Dumping effective build target config for target "{target}":')
        merged_target_config = self.config.get_merged_target_config(target)
        print(json.dumps(asdict(merged_target_config), indent=4, default=str))

    def generate_dyndeps_from_p1689(self, target: str) -> None:
        # Parse P1689 build dependency JSON file from stdin (generated by clang-scan-deps)
        parsed_p1689 = json.load(sys.stdin)

        # Parse rules and get module dependencies
        build_dependency_parser = BuildDependencyParser(config=self.config, target=target)
        build_dependencies = build_dependency_parser.parse_build_dependencies(parsed_p1689)

        # Generate Ninja dyndep file and write to stdout
        print('ninja_dyndep_version = 1\n')
        for out, deps in build_dependencies.items():
            if deps:
                print(f'build {out}: dyndep | {" ".join(deps)}')
            else:
                print(f'build {out}: dyndep')

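# Illustrative output of the p1689-to-dyndeps subcommand above (written to stdout and
# redirected into $dyndep_file by the generate_dyndeps rule); all paths are hypothetical:
#
#   ninja_dyndep_version = 1
#
#   build build/debug/obj/main.o: dyndep | build/debug/pcm/foo.bar.pcm
#   build build/debug/obj/foo/bar.o: dyndep
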
CompileTargetCpp = namedtuple('CompileTargetCpp', ['src', 'obj'])
CompileTargetCppm = namedtuple('CompileTargetCppm', ['src', 'obj', 'pcm'])

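# For a hypothetical module source file src/foo/bar.cppm, collect_compile_targets() below
# would roughly produce (paths are illustrative):
#   CompileTargetCppm(src='src/foo/bar.cppm', obj='$builddir/obj/foo/bar.o', pcm='$builddir/pcm/foo.bar.pcm')
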
class NinjaFileGenerator:
    config: ShurikenConfig
    target: str
    target_config: TargetConfig

    compile_targets_cpp: list[CompileTargetCpp]
    compile_targets_cppm: list[CompileTargetCppm]
    link_target_obj_files: list[str]

    def __init__(self, *, config: ShurikenConfig, target: str):
        self.config = config
        self.target = target
        self.target_config = config.get_merged_target_config(target)

    def generate_file(self) -> str:
        self.collect_compile_targets()
        return self.render_file()

    def find_source_files(self, glob: str) -> list[Path]:
        return sorted(self.config.source_dir.glob(glob))

    def collect_compile_targets(self) -> None:
        self.compile_targets_cpp = []
        self.compile_targets_cppm = []

        # Compile targets for .cpp files
        for src_file in self.find_source_files('**/*.cpp'):
            relative_name = src_file.relative_to(self.config.source_dir)
            self.compile_targets_cpp.append(CompileTargetCpp(
                src=str(src_file),
                obj=f"$builddir/obj/{relative_name.with_suffix('.o')}",
            ))

        # Compile targets for .cppm files
        for src_file in self.find_source_files('**/*.cppm'):
            relative_name = src_file.relative_to(self.config.source_dir)
            self.compile_targets_cppm.append(CompileTargetCppm(
                src=str(src_file),
                obj=f"$builddir/obj/{relative_name.with_suffix('.o')}",
                pcm=f"$builddir/pcm/{'.'.join(relative_name.with_suffix('.pcm').parts)}",
            ))

        # Link target for output file (executable): gather all object files
        self.link_target_obj_files = sorted(
            target.obj for target in self.compile_targets_cpp + self.compile_targets_cppm
        )

    def render_file(self) -> str:
        # Get script path to run shuriken
        shuriken_script_path = sys.argv[0]

        # Shortcut variables
        config = self.config
        target = self.target
        target_config = self.target_config
        build_dir = self.config.build_dir

        # Generate Ninja file
        return f"""
ninja_required_version = 1.10

# Build directory and paths
builddir = {build_dir / target}
compdb_file = {build_dir}/compile_commands.json
dyndep_file = $builddir/.ninja_dyndep

# Compiler flags
cpp_flags = -std={target_config.cpp_standard} -fprebuilt-module-path=$builddir/pcm/ {target_config.merged_cpp_flags}
linker_flags = {target_config.merged_linker_flags}
linker_args = {target_config.merged_linker_args}

# -- RULES

# Rule to compile a .cpp file to a .o file
rule cpp
    command = {target_config.cpp_compiler} $cpp_flags -c $in -o $out

# Rule to compile a .cppm file (C++20 module) to a .o file ($out) and a .pcm file ($pcm_out)
rule cppm
    command = {target_config.cpp_compiler} $cpp_flags -c $in -o $out -fmodule-output=$pcm_out

# Rule to link several .o files to an executable
rule link
    command = {target_config.cpp_compiler} $linker_flags -o $out $in $linker_args

# Rule to generate a compilation database (JSON file) from the build targets defined in a Ninja file
rule generate_compdb
    command = ninja -f $in -t compdb cpp cppm > $out

# Rule to generate a Ninja dyndep file for C++ module dependencies based on a compilation database
rule generate_dyndeps
    command = clang-scan-deps -format=p1689 -compilation-database=$in | {shuriken_script_path} -t {target} p1689-to-dyndeps > $out


# -- STATIC BUILD TARGETS

# Generate compilation database from default target Ninja file
build $compdb_file: generate_compdb {self.config.get_ninja_file_path(config.default_target)}

# Shortcut alias to generate compilation database
build compdb: phony $compdb_file

# Generate Ninja dyndep file from compilation database
build $dyndep_file: generate_dyndeps $compdb_file


# -- GENERATED BUILD TARGETS

# Link output file (default target)
build $builddir/{target_config.output_file}: link {' '.join(self.link_target_obj_files)}
default $builddir/{target_config.output_file}

{self.render_compile_targets()}

# End of build.{target}.ninja
""".strip()

    def render_compile_targets(self) -> str:
        rendered_targets_cpp = [self.render_compile_target_cpp(target) for target in self.compile_targets_cpp]
        rendered_targets_cppm = [self.render_compile_target_cppm(target) for target in self.compile_targets_cppm]
        return '\n\n'.join(rendered_targets_cpp + rendered_targets_cppm)

    @staticmethod
    def render_compile_target_cpp(target: CompileTargetCpp) -> str:
        return f"""
build {target.obj}: cpp {target.src} || $dyndep_file
    dyndep = $dyndep_file
""".strip()

    @staticmethod
    def render_compile_target_cppm(target: CompileTargetCppm) -> str:
        return f"""
build {target.obj} | {target.pcm}: cppm {target.src} || $dyndep_file
    dyndep = $dyndep_file
    pcm_out = {target.pcm}
""".strip()

class BuildDependencyParser:
    config: ShurikenConfig
    target: str

    def __init__(self, config: ShurikenConfig, target: str):
        self.config = config
        self.target = target

    def parse_build_dependencies(self, parsed_p1689: dict[str, Any]) -> dict[Path, list[str]]:
        # Shortcut variables
        source_dir = self.config.source_dir
        target_build_dir = self.config.build_dir / self.target
        default_target_build_dir = self.config.build_dir / self.config.default_target

        # Validate module name and path convention, construct map of module names to PCM file paths
        module_map = {}
        for rule in parsed_p1689['rules']:
            provides = rule.get('provides', [])
            if provides:
                assert len(provides) == 1, f"Rule provides more than one module: {rule}"
                assert provides[0]['is-interface'] is True, f"Rule provides non-interface module: {rule}"

                module_name: str = provides[0]['logical-name']
                assert module_name not in module_map, f"Module {module_name} is provided more than once: {rule}"

                expected_source_path = str(Path(source_dir, module_name.replace('.', '/') + '.cppm'))
                assert provides[0]['source-path'] == expected_source_path, f"Module name does not match source path: {rule}"

                module_map[module_name] = str(Path(target_build_dir, 'pcm', module_name + '.pcm'))

        # Parse rules and get module dependencies
        build_dependencies = {}
        for rule in parsed_p1689['rules']:
            # Compilation database contains "build/{default_target}" paths, we need to map them to "build/{target}"
            out = target_build_dir / Path(rule['primary-output']).relative_to(default_target_build_dir)

            build_dependencies[out] = []
            for require in rule.get('requires', []):
                module_name = require['logical-name']
                assert module_name in module_map, f'Module {module_name} not found in module map'
                build_dependencies[out].append(module_map[module_name])

        return build_dependencies

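# Abbreviated, illustrative shape of the P1689 input consumed by parse_build_dependencies
# above (as emitted by clang-scan-deps -format=p1689); module and path names are hypothetical:
#
#   {
#     "rules": [
#       {
#         "primary-output": "build/debug/obj/foo/bar.o",
#         "provides": [{"logical-name": "foo.bar", "source-path": "src/foo/bar.cppm", "is-interface": true}],
#         "requires": [{"logical-name": "foo.baz"}]
#       }
#     ]
#   }
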
if __name__ == '__main__':
    ShurikenCli().run()