Refactors project structure for modularity

Renames and reorganizes modules under a new 'hdlbuild' namespace for improved clarity and maintainability. Updates import paths across the codebase to reflect the new structure.

No logic changes introduced.
2025-04-26 18:52:44 +00:00
parent f9d5e3c535
commit 28406028c6
18 changed files with 47 additions and 47 deletions

0 src/hdlbuild/__init__.py Normal file

81 src/hdlbuild/cli.py Normal file

@@ -0,0 +1,81 @@
import argparse
import sys

from hdlbuild.dependencies.resolver import DependencyResolver
from hdlbuild.tools.xilinx_ise.main import xilinx_ise_all, xilinx_ise_synth
from hdlbuild.utils.console_utils import ConsoleUtils
from hdlbuild.utils.directory_manager import clear_build_directories, clear_directories, ensure_directories_exist
from hdlbuild.utils.project_loader import load_project_config

project = load_project_config()
console_utils = ConsoleUtils("hdlbuild")


def clear(args):
    """Clears the build artifacts."""
    if args.target == "all":
        console_utils.print("Starting clear all process...")
        clear_directories()
        console_utils.print("All cleared.")
    else:
        console_utils.print("Clearing build artifacts...")
        clear_build_directories()
        console_utils.print("Build artifacts cleared.")


def build(args):
    """Starts the build process."""
    console_utils.print("Starting build process...")
    ensure_directories_exist(True)
    xilinx_ise_all(project)


def synth(args):
    """Starts the synthesis process."""
    console_utils.print("Starting synthesis process...")
    ensure_directories_exist()
    xilinx_ise_synth(project)


def dep(args):
    """Starts the dependency resolution process."""
    console_utils.print("Starting dependencies process...")
    DependencyResolver(project).resolve_all()


def main():
    parser = argparse.ArgumentParser(
        description="hdlbuild - Build management tool for FPGA projects",
        formatter_class=argparse.RawTextHelpFormatter
    )
    subparsers = parser.add_subparsers(
        title="Commands",
        description="Available commands",
        dest="command",
        required=True
    )

    # Clear command
    parser_clear = subparsers.add_parser("clear", help="Clear build artifacts")
    parser_clear.add_argument(
        "target",
        nargs="?",
        choices=["all"],
        help="Specify 'all' to clear everything (optional)"
    )
    parser_clear.set_defaults(func=clear)

    # Build command
    parser_build = subparsers.add_parser("build", help="Start the build process")
    parser_build.set_defaults(func=build)

    # Synth command
    parser_synth = subparsers.add_parser("synth", help="Start the synth process")
    parser_synth.set_defaults(func=synth)

    # Dependencies command
    parser_dep = subparsers.add_parser("dep", help="Start the dependencies process")
    parser_dep.set_defaults(func=dep)

    args = parser.parse_args()
    args.func(args)


if __name__ == "__main__":
    main()
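
For orientation, a minimal invocation sketch. It assumes the package exposes hdlbuild.cli as an importable module and that a project.yml exists in the current directory, since the module loads it at import time; the console-script name is not part of this commit and is used here only as a placeholder.

    # Emulate "hdlbuild synth" without an installed console script; any of the
    # subcommands above (clear, build, synth, dep) can be substituted.
    import sys
    from hdlbuild.cli import main

    sys.argv = ["hdlbuild", "synth"]
    main()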


@@ -0,0 +1,99 @@
# src/hdlbuild/dependency/resolver.py
import os
from typing import List, Set

from git import Repo

from hdlbuild.models.config import DIRECTORIES, GIT
from hdlbuild.models.project import ProjectConfig
from hdlbuild.models.dependency import ResolvedDependency
from hdlbuild.utils.console_utils import ConsoleUtils
from hdlbuild.utils.project_loader import load_project_config


class DependencyResolver:
    def __init__(self, root_project: ProjectConfig, offline_mode: bool = False):
        self.root_project = root_project
        self.offline_mode = offline_mode
        self.resolved: List[ResolvedDependency] = []
        self.visited_urls: Set[str] = set()
        self.console = ConsoleUtils(live=True)
        self.console.start_live()

    def resolve_all(self):
        """Resolves all dependencies recursively."""
        self._resolve_project(self.root_project)
        self.console.stop_live("[bold green]All dependencies resolved.[/bold green]")

    def _resolve_project(self, project: ProjectConfig):
        """Resolves the dependencies of a single project."""
        for dep in project.dependencies or []:
            if dep.git in self.visited_urls:
                continue
            self.visited_urls.add(dep.git)

            local_path = self._clone_or_use_existing(dep.git, dep.rev)
            dep_project = self._load_project_config(os.path.join(local_path, "project.yml"))

            # Store as ResolvedDependency
            self.resolved.append(ResolvedDependency(project=dep_project, local_path=local_path))
            self._resolve_project(dep_project)

    def _clone_or_use_existing(self, git_url: str, rev: str) -> str:
        folder_name = os.path.basename(git_url.rstrip("/")).replace(".git", "")
        local_path = os.path.join(DIRECTORIES.dependency, folder_name)

        if os.path.exists(local_path):
            # Local repository already present
            self.console.print(f"[bold green]Using existing repository: {folder_name}[/bold green]")
            repo = Repo(local_path)
            if not self.offline_mode:
                try:
                    self.console.print(f"[bold green]Updating {folder_name}...[/bold green]")
                    # Fetch remote updates
                    repo.remotes.origin.fetch()

                    # Check whether HEAD and origin/<branch> differ
                    local_commit = repo.head.commit
                    remote_ref = repo.remotes.origin.refs[repo.active_branch.name]
                    remote_commit = remote_ref.commit

                    if local_commit.hexsha != remote_commit.hexsha:
                        self.console.print("[bold yellow]Changes detected! Performing a force pull...[/bold yellow]")
                        repo.git.reset('--hard', remote_commit.hexsha)
                    else:
                        self.console.print(f"[bold green]Repository {folder_name} is up to date.[/bold green]")
                except Exception as e:
                    self.console.print(f"[bold red]Warning while updating: {e}[/bold red]")
        else:
            # Local repository missing, so clone it (only allowed online)
            if self.offline_mode:
                raise FileNotFoundError(f"Repository {folder_name} does not exist locally and offline_mode is active.")
            else:
                self.console.print(f"[bold green]Cloning {git_url}...[/bold green]")
                repo = Repo.clone_from(git_url, local_path)

        # Always switch to the requested commit/branch
        self.console.print(f"[bold green]Checking out[/bold green] [yellow]{rev}[/yellow] in {folder_name}")
        repo.git.checkout(rev)

        return local_path

    def _load_project_config(self, path: str) -> ProjectConfig:
        """
        Loads a project.yml from a local folder.

        Args:
            path (str): Path to the project.yml of the cloned project.

        Returns:
            ProjectConfig: The loaded project.
        """
        self.console.print(f"Loading project.yml from {path}...")
        return load_project_config(path)
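
A short usage sketch, assuming a project.yml with a dependencies section exists in the current directory:

    from hdlbuild.dependencies.resolver import DependencyResolver
    from hdlbuild.utils.project_loader import load_project_config

    # Clone/update every dependency recursively and list where each one landed.
    resolver = DependencyResolver(load_project_config("project.yml"))
    resolver.resolve_all()
    for dep in resolver.resolved:
        print(dep.project.name, "->", dep.local_path)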


@@ -0,0 +1,26 @@
import os

from pydantic import BaseModel


class DirectoryConfig(BaseModel):
    dependency: str = ".hdlbuild_deps"
    build: str = ".working"
    report: str = "reports"
    copy_target: str = "output"

    def get_relative_prefix(self) -> str:
        """
        Returns the relative path from the build directory back to the project root.

        Examples:
            ".working"    -> "../"
            ".build/deep" -> "../../"
        """
        depth = len(os.path.normpath(self.build).split(os.sep))
        return "../" * depth


DIRECTORIES = DirectoryConfig()


class GitConfig(BaseModel):
    timeout: int = 10


GIT = GitConfig()
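
The prefix computation can be checked directly; a minimal sketch (POSIX path separator assumed):

    from hdlbuild.models.config import DirectoryConfig

    # ".working" is one directory deep, so one step back up reaches the project root.
    assert DirectoryConfig(build=".working").get_relative_prefix() == "../"
    # A nested build directory needs one "../" per path component.
    assert DirectoryConfig(build=".build/deep").get_relative_prefix() == "../../"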


@@ -0,0 +1,8 @@
# models/dependency.py
from pydantic import BaseModel

from hdlbuild.models.project import ProjectConfig


class ResolvedDependency(BaseModel):
    project: ProjectConfig
    local_path: str


@@ -0,0 +1,47 @@
from pydantic import BaseModel, Field
from typing import List, Optional


class SourceFile(BaseModel):
    path: str
    library: str = "work"  # defaults to the 'work' library


class ToolOptions(BaseModel):
    common: List[str] = Field(default_factory=list)
    xst: List[str] = Field(default_factory=list)
    ngdbuild: List[str] = Field(default_factory=list)
    map: List[str] = Field(default_factory=list)
    par: List[str] = Field(default_factory=list)
    bitgen: List[str] = Field(default_factory=list)
    trace: List[str] = Field(default_factory=list)
    fuse: List[str] = Field(default_factory=list)


class Dependency(BaseModel):
    name: Optional[str] = None  # the name is optional
    git: str
    rev: str
    library: str = "work"  # defaults to the 'work' library


class Sources(BaseModel):
    vhdl: List[SourceFile] = Field(default_factory=list)
    verilog: List[SourceFile] = Field(default_factory=list)


class Testbenches(BaseModel):
    vhdl: List[SourceFile] = Field(default_factory=list)
    verilog: List[SourceFile] = Field(default_factory=list)


class BuildOptions(BaseModel):
    build_dir: Optional[str] = "working"
    report_dir: Optional[str] = "reports"
    copy_target_dir: Optional[str] = "output"


class ProjectConfig(BaseModel):
    name: str
    topmodule: Optional[str]
    target_device: str
    xilinx_path: str
    sources: Sources
    testbenches: Optional[Testbenches] = None
    constraints: Optional[str] = None
    build: Optional[BuildOptions] = None
    dependencies: Optional[List[Dependency]] = Field(default_factory=list)
    tool_options: Optional[ToolOptions] = ToolOptions()
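
For illustration, the dict below mirrors what a parsed project.yml might look like once handed to ProjectConfig; every value is hypothetical, and only the required fields plus a few optional ones are shown:

    from hdlbuild.models.project import ProjectConfig

    example = {
        "name": "blinky",                       # hypothetical project name
        "topmodule": "top",
        "target_device": "xc6slx9-2-tqg144",    # hypothetical Spartan-6 part
        "xilinx_path": "/opt/Xilinx/14.7/ISE_DS/ISE",
        "constraints": "constraints/blinky.ucf",
        "sources": {"vhdl": [{"path": "rtl/**/*.vhd", "library": "work"}]},
        "dependencies": [
            {"git": "https://example.com/libs/uart.git", "rev": "main", "library": "uart"}
        ],
    }
    project = ProjectConfig(**example)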


@@ -0,0 +1,29 @@
import subprocess
import os
import shutil
from typing import Optional

from hdlbuild.models.project import ProjectConfig
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool


def run_bitgen(project: ProjectConfig):
    run_tool(
        project=project,
        tool_executable_name="bitgen",
        tool_option_attr="bitgen",
        mandatory_arguments=[
            "-w",
            f"{project.name}.ncd",
            f"{project.name}.bit"
        ],
        step_number=9, total_steps=12
    )


def copy_bitstream_file(project: ProjectConfig):
    copy_file(
        project=project,
        source_filename=f"{project.name}.bit",
        destination_filename=f"{project.name}.Bitstream",
        description="Bitstream File",
        step_number=10, total_steps=12
    )


@@ -0,0 +1,72 @@
import shutil
import os
from typing import Optional, List

from hdlbuild.models.project import ProjectConfig
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.utils.console_utils import ConsoleTask, ConsoleUtils
from rich.console import Console


def run_tool(
    project: ProjectConfig,
    tool_executable_name: str,
    mandatory_arguments: List[str],
    tool_option_attr: Optional[str] = None,
    working_dir: Optional[str] = None,
    silent: bool = False,
    step_number: Optional[int] = None,
    total_steps: Optional[int] = None
):
    if working_dir is None:
        working_dir = DIRECTORIES.build

    xilinx_bin_dir = os.path.join(project.xilinx_path, "bin", "lin64")
    tool_executable = os.path.join(xilinx_bin_dir, tool_executable_name)

    if not os.path.exists(tool_executable):
        raise FileNotFoundError(f"Executable not found: {tool_executable}")

    cmd = [tool_executable]

    if project.tool_options and project.tool_options.common:
        cmd.extend(project.tool_options.common)

    if tool_option_attr and project.tool_options:
        tool_opts = getattr(project.tool_options, tool_option_attr, [])
        if tool_opts:
            cmd.extend(tool_opts)

    cmd.extend(mandatory_arguments)

    task = ConsoleTask("hdlbuild", tool_executable_name.upper(), step_number, total_steps)
    task.run_command(cmd, cwd=working_dir, silent=silent)


def copy_file(
    project: ProjectConfig,
    source_filename: str,
    destination_filename: str,
    description: str = "Report",
    step_number: Optional[int] = None,
    total_steps: Optional[int] = None
):
    """
    Copies an arbitrary report file from the build directory to the report directory.

    Args:
        project (ProjectConfig): Loaded project configuration
        source_filename (str): Name of the source file in the build directory
        destination_filename (str): New name of the target file in the report directory
        description (str): Optional description used in the output (e.g. "Synthesis Report")
    """
    src_path = os.path.join(DIRECTORIES.build, source_filename)
    dst_path = os.path.join(DIRECTORIES.report, destination_filename)

    if not os.path.exists(src_path):
        raise FileNotFoundError(f"{description} not found: {src_path}")

    os.makedirs(DIRECTORIES.report, exist_ok=True)
    shutil.copyfile(src_path, dst_path)

    util = ConsoleUtils("hdlbuild", step_number, total_steps)
    util.print(f"{description} copied to {dst_path}")


@@ -0,0 +1,34 @@
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.models.project import ProjectConfig
from hdlbuild.tools.xilinx_ise.bitgen import copy_bitstream_file, run_bitgen
from hdlbuild.tools.xilinx_ise.map import copy_map_report, run_map
from hdlbuild.tools.xilinx_ise.ngdbuild import run_ngdbuild
from hdlbuild.tools.xilinx_ise.par import copy_par_report, copy_pinout_report, run_par
from hdlbuild.tools.xilinx_ise.trace import copy_trace_report, run_trace
from hdlbuild.tools.xilinx_ise.xst import copy_synthesis_report, generate_xst_project_file, generate_xst_script_file, run_xst


def xilinx_ise_synth(project: ProjectConfig):
    generate_xst_project_file(project, f"{DIRECTORIES.build}/{project.name}.prj")
    generate_xst_script_file(project, f"{DIRECTORIES.build}/{project.name}.scr")
    run_xst(project)
    copy_synthesis_report(project)


def xilinx_ise_all(project: ProjectConfig):
    xilinx_ise_synth(project)
    run_ngdbuild(project)
    run_map(project)
    copy_map_report(project)
    run_par(project)
    copy_par_report(project)
    copy_pinout_report(project)
    run_bitgen(project)
    copy_bitstream_file(project)
    run_trace(project)
    copy_trace_report(project)


@@ -0,0 +1,30 @@
import subprocess
import os
import shutil
from typing import Optional

from hdlbuild.models.project import ProjectConfig
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool


def run_map(project: ProjectConfig):
    run_tool(
        project=project,
        tool_executable_name="map",
        tool_option_attr="map",
        mandatory_arguments=[
            "-p", project.target_device,
            "-w",
            f"{project.name}.ngd",
            "-o", f"{project.name}.map.ncd",
            f"{project.name}.pcf"
        ],
        step_number=4, total_steps=12
    )


def copy_map_report(project: ProjectConfig):
    copy_file(
        project=project,
        source_filename=f"{project.name}.map.mrp",
        destination_filename=f"{project.name}.MapReport",
        description="Map Report",
        step_number=5, total_steps=12
    )


@@ -0,0 +1,19 @@
import subprocess
import os
from typing import Optional

from hdlbuild.models.project import ProjectConfig
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.tools.xilinx_ise.common import run_tool


def run_ngdbuild(project: ProjectConfig):
    run_tool(
        project=project,
        tool_executable_name="ngdbuild",
        tool_option_attr="ngdbuild",
        mandatory_arguments=[
            "-p", project.target_device,
            "-uc", f"{DIRECTORIES.get_relative_prefix()}{project.constraints}",
            f"{project.name}.ngc",
            f"{project.name}.ngd"
        ],
        step_number=3, total_steps=12
    )


@@ -0,0 +1,38 @@
import subprocess
import shutil
import os
from typing import Optional

from hdlbuild.models.project import ProjectConfig
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool


def run_par(project: ProjectConfig):
    run_tool(
        project=project,
        tool_executable_name="par",
        tool_option_attr="par",
        mandatory_arguments=[
            "-w",
            f"{project.name}.map.ncd",
            f"{project.name}.ncd",
            f"{project.name}.pcf"
        ],
        step_number=6, total_steps=12
    )


def copy_par_report(project: ProjectConfig):
    copy_file(
        project=project,
        source_filename=f"{project.name}.par",
        destination_filename=f"{project.name}.PlaceRouteReport",
        description="Place & Route Report",
        step_number=7, total_steps=12
    )


def copy_pinout_report(project: ProjectConfig):
    copy_file(
        project=project,
        source_filename=f"{project.name}_pad.txt",
        destination_filename=f"{project.name}.PinoutReport",
        description="Pinout Report",
        step_number=8, total_steps=12
    )


@@ -0,0 +1,27 @@
import subprocess
import os
import shutil
from typing import Optional

from hdlbuild.models.project import ProjectConfig
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool


def run_trace(project: ProjectConfig):
    run_tool(
        project=project,
        tool_executable_name="trce",
        tool_option_attr="trace",
        mandatory_arguments=[
            f"{project.name}.ncd",
            f"{project.name}.pcf",
        ],
        step_number=11, total_steps=12
    )


def copy_trace_report(project: ProjectConfig):
    copy_file(
        project=project,
        source_filename=f"{project.name}.twr",
        destination_filename=f"{project.name}.TimingReport",
        description="Timing Report",
        step_number=12, total_steps=12
    )


@@ -0,0 +1,67 @@
from typing import Optional

from hdlbuild.dependencies.resolver import DependencyResolver
from hdlbuild.models.config import DIRECTORIES
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
from hdlbuild.utils.source_resolver import expand_all_sources
from hdlbuild.models.project import ProjectConfig
import subprocess
import os
import shutil


def generate_xst_project_file(project: ProjectConfig, output_path: str):
    """
    Generates the XST .prj file listing all source files.
    """
    with open(output_path, "w") as f:
        resolver = DependencyResolver(project, offline_mode=True)
        resolver.resolve_all()

        vhdl_sources, verilog_sources = expand_all_sources(project, resolver.resolved)

        for lib, file in vhdl_sources:
            f.write(f"vhdl {lib} \"{DIRECTORIES.get_relative_prefix()}{file}\"\n")

        for lib, file in verilog_sources:
            f.write(f"verilog {lib} \"{DIRECTORIES.get_relative_prefix()}{file}\"\n")

        # Optional dependencies
        if project.dependencies:
            for dep in project.dependencies:
                # Additional sources from the dependency could be expanded here
                pass


def generate_xst_script_file(project: ProjectConfig, output_path: str):
    """
    Generates the XST .scr file containing the synthesis options.
    """
    with open(output_path, "w") as f:
        f.write("run ")
        f.write(f"-ifn {project.name}.prj ")
        f.write(f"-ofn {project.name}.ngc ")
        f.write("-ifmt mixed ")

        if project.tool_options and project.tool_options.xst:
            for opt in project.tool_options.xst:
                f.write(f"{opt} ")

        f.write(f"-top {project.topmodule} ")
        f.write("-ofmt NGC ")
        f.write(f"-p {project.target_device} ")


def run_xst(project: ProjectConfig):
    run_tool(
        project=project,
        tool_executable_name="xst",
        mandatory_arguments=["-ifn", f"{project.name}.scr"],
        step_number=1, total_steps=12
    )


def copy_synthesis_report(project: ProjectConfig):
    copy_file(
        project=project,
        source_filename=f"{project.name}.srp",
        destination_filename=f"{project.name}.SynthesisReport",
        description="Synthesis Report",
        step_number=2, total_steps=12
    )
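
A minimal sketch of what the generated script looks like, using hypothetical project values and no extra xst options:

    from hdlbuild.models.project import ProjectConfig, Sources
    from hdlbuild.tools.xilinx_ise.xst import generate_xst_script_file

    demo = ProjectConfig(
        name="blinky", topmodule="top",
        target_device="xc6slx9-2-tqg144",           # hypothetical device
        xilinx_path="/opt/Xilinx/14.7/ISE_DS/ISE",  # hypothetical install path
        sources=Sources(),
    )
    generate_xst_script_file(demo, "blinky.scr")
    # blinky.scr now contains a single line:
    #   run -ifn blinky.prj -ofn blinky.ngc -ifmt mixed -top top -ofmt NGC -p xc6slx9-2-tqg144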


@@ -0,0 +1,176 @@
import sys
import threading
import time
import subprocess
from typing import List, Optional

from rich.console import Console
from rich.live import Live
from rich.text import Text
from rich.markup import render


class ConsoleTask:
    def __init__(self, prefix: str, title: str, step_number: Optional[int] = None, total_steps: Optional[int] = None, max_log_lines: int = 10):
        self.prefix = prefix
        self.title = title
        self.step_number = step_number
        self.total_steps = total_steps
        self.max_log_lines = max_log_lines

        self.spinner_cycle = ['|', '/', '-', '\\']
        self.stop_event = threading.Event()
        self.spinner_thread: Optional[threading.Thread] = None
        self.output_lines: List[str] = []
        self.all_lines: List[str] = []
        self._stdout_lock = threading.Lock()

        self.console = Console()
        self.live: Optional[Live] = None
        self.spinner_idx = 0

    def start_spinner(self):
        self.live = Live(console=self.console, refresh_per_second=30, transient=True)
        self.live.start()
        self.spinner_thread = threading.Thread(target=self._spinner_task, daemon=True)
        self.spinner_thread.start()

    def _spinner_task(self):
        while not self.stop_event.is_set():
            with self._stdout_lock:
                self._redraw_spinner()
            self.spinner_idx += 1
            time.sleep(0.1)

    def _render_content(self) -> Text:
        visible_lines = self.output_lines[-self.max_log_lines:]

        prefix_text = f"[grey50]\\[{self.prefix}][/grey50]" if self.prefix else ""
        step_text = f"[bold blue]Step {self.step_number}/{self.total_steps}[/bold blue]" if self.step_number and self.total_steps else ""
        title_text = f"[bold]{self.title}[/bold]" if self.title else ""
        spinner_markup = f"{prefix_text} {step_text} {title_text} {self.spinner_cycle[self.spinner_idx % len(self.spinner_cycle)]}"

        spinner_text = Text.from_markup(spinner_markup)
        log_text = Text("\n".join(visible_lines))

        full_text = spinner_text + Text("\n") + log_text
        return full_text

    def _redraw_spinner(self):
        if self.live:
            self.live.update(self._render_content())

    def log(self, message: str):
        with self._stdout_lock:
            self.all_lines.append(message)
            self.output_lines.append(message)
            if len(self.output_lines) > self.max_log_lines:
                self.output_lines = self.output_lines[-self.max_log_lines:]
            if self.live:
                self.live.update(self._render_content())

    def run_command(self, cmd: List[str], cwd: Optional[str] = None, silent: bool = False) -> int:
        success = False
        start_time = time.time()
        self.start_spinner()

        try:
            if silent:
                subprocess.run(cmd, cwd=cwd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                success = True
            else:
                process = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
                if process.stdout is None:
                    raise ValueError("Failed to capture stdout")
                while True:
                    line = process.stdout.readline()
                    if not line and process.poll() is not None:
                        break
                    if line:
                        self.log(line.rstrip())
                success = (process.returncode == 0)
        finally:
            self.stop_event.set()
            if self.spinner_thread:
                self.spinner_thread.join()
            duration = time.time() - start_time
            with self._stdout_lock:
                self._finalize_output(success, duration)

        if not success:
            # Pretty error output, then a controlled shutdown
            self.console.print("\n[bold red]❌ Error while running the command:[/bold red]")
            for line in self.all_lines:
                self.console.print(f"[red]{line}[/red]")
            sys.exit(1)  # ❗ hard, but clean, program exit

        return 0

    def _finalize_output(self, success: bool, duration: float):
        if self.live:
            self.live.stop()

        prefix_text = f"[grey50]\\[{self.prefix}][/grey50]" if self.prefix else ""
        status_symbol = "[green]✅[/green]" if success else "[red]❌[/red]"
        step_text = f"[bold blue]Step {self.step_number}/{self.total_steps}[/bold blue]" if self.step_number and self.total_steps else ""
        status_title = f"[bold green]{self.title}[/bold green]" if success else f"[bold red]{self.title}[/bold red]"
        final_line = f"{prefix_text} {step_text} {status_title} {status_symbol} [bold green]({duration:.1f}s)[/bold green]"

        # Final full output
        self.console.print(final_line)


class ConsoleUtils:
    def __init__(
        self,
        prefix: str = "hdlbuild",
        step_number: Optional[int] = None,
        total_steps: Optional[int] = None,
        live: bool = False
    ):
        self.prefix = prefix
        self.step_number = step_number
        self.total_steps = total_steps
        self.console = Console()
        self.live_mode = live
        self.live: Optional[Live] = None
        self.messages: List[str] = []

    def start_live(self):
        """Starts live mode."""
        if self.live_mode and self.live is None:
            self.live = Live(console=self.console, refresh_per_second=10, transient=True)
            self.live.start()

    def print(self, message: str):
        prefix = f"[grey50]\\[{self.prefix}][/grey50]" if self.prefix else ""
        step_info = f"[bold blue]Step {self.step_number}/{self.total_steps}[/bold blue]" if self.step_number and self.total_steps else ""
        full_message = f"{prefix} {step_info} {message}"

        if self.live_mode and self.live:
            self.messages.append(full_message)
            rendered_lines = [Text.from_markup(line) for line in self.messages]
            combined = Text()
            for line in rendered_lines:
                combined.append(line)
                combined.append("\n")
            self.live.update(combined)
        else:
            self.console.print(full_message)

    def stop_live(self, final_message: Optional[str] = None):
        """Stops live mode, discards buffered output, and prints a final message."""
        if self.live_mode and self.live:
            self.live.stop()
            self.live = None
            self.messages.clear()  # discard old messages
            if final_message:
                self.console.print(final_message)
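
A brief usage sketch for both helpers; the echo command is only a stand-in for a real tool invocation:

    from hdlbuild.utils.console_utils import ConsoleTask, ConsoleUtils

    # One-off status line with the standard prefix.
    ConsoleUtils("hdlbuild").print("Starting build process...")

    # Run an external command with a live spinner and a scrolling log window.
    task = ConsoleTask("hdlbuild", "ECHO", step_number=1, total_steps=1)
    task.run_command(["echo", "hello"])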


@@ -0,0 +1,57 @@
import os
import shutil

from hdlbuild.models.config import DIRECTORIES
from hdlbuild.utils.console_utils import ConsoleUtils


def ensure_directories_exist(silent: bool = False):
    """
    Creates all directories defined in the configuration if they do not exist yet.
    """
    console_utils = None
    if not silent:
        console_utils = ConsoleUtils("hdlbuild")

    for name, path in DIRECTORIES.dict().items():
        if not os.path.exists(path):
            os.makedirs(path, exist_ok=True)
            if not silent and console_utils:
                console_utils.print(f"Created directory: {path}")
        else:
            if not silent and console_utils:
                console_utils.print(f"Directory already exists: {path}")


def clear_directories(silent: bool = False):
    """
    Deletes all directories defined in the configuration if they exist.
    """
    console_utils = None
    if not silent:
        console_utils = ConsoleUtils("hdlbuild")

    for name, path in DIRECTORIES.dict().items():
        if os.path.exists(path):
            if not silent and console_utils:
                console_utils.print(f"Deleting directory: {path}")
            shutil.rmtree(path)
        else:
            if not silent and console_utils:
                console_utils.print(f"Directory not present, skipping: {path}")


def clear_build_directories(silent: bool = False):
    """
    Deletes all directories defined in the configuration except the dependency
    directory, if they exist.
    """
    console_utils = None
    if not silent:
        console_utils = ConsoleUtils("hdlbuild")

    for name, path in DIRECTORIES.dict().items():
        if name == "dependency":
            continue
        if os.path.exists(path):
            if not silent and console_utils:
                console_utils.print(f"Deleting directory: {path}")
            shutil.rmtree(path)
        else:
            if not silent and console_utils:
                console_utils.print(f"Directory not present, skipping: {path}")


@@ -0,0 +1,16 @@
import yaml

from hdlbuild.models.project import ProjectConfig


def load_project_config(path: str = "project.yml") -> ProjectConfig:
    """
    Loads the project configuration from a YAML file and returns a typed ProjectConfig object.

    Args:
        path (str): Path to the project.yml file (default: "project.yml")

    Returns:
        ProjectConfig: Parsed and typed project configuration object
    """
    with open(path, "r") as file:
        raw_data = yaml.safe_load(file)
    return ProjectConfig(**raw_data)


@@ -0,0 +1,68 @@
# src/hdlbuild/utils/source_resolver.py
import glob
import os
from typing import List, Tuple

from hdlbuild.models.project import SourceFile, ProjectConfig
from hdlbuild.models.dependency import ResolvedDependency


def _expand_project_sources(project: ProjectConfig, project_root: str) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
    """
    Expands the sources of a single project, separated into VHDL and Verilog.

    Args:
        project (ProjectConfig): The project whose sources should be expanded.
        project_root (str): Base directory from which the paths are resolved.

    Returns:
        Tuple: (list of (library, filepath) for VHDL, list of (library, filepath) for Verilog)
    """
    vhdl_expanded = []
    verilog_expanded = []

    # VHDL sources
    for source in project.sources.vhdl:
        full_pattern = os.path.join(project_root, source.path)
        matched_files = glob.glob(full_pattern, recursive=True)
        for file in matched_files:
            normalized_path = os.path.normpath(file)
            vhdl_expanded.append((source.library, normalized_path))

    # Verilog sources
    for source in project.sources.verilog:
        full_pattern = os.path.join(project_root, source.path)
        matched_files = glob.glob(full_pattern, recursive=True)
        for file in matched_files:
            normalized_path = os.path.normpath(file)
            verilog_expanded.append((source.library, normalized_path))

    return vhdl_expanded, verilog_expanded


def expand_all_sources(root_project: ProjectConfig, resolved_dependencies: List[ResolvedDependency]) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
    """
    Expands all sources from the root project and all dependencies, separated into VHDL and Verilog.

    Args:
        root_project (ProjectConfig): The main project
        resolved_dependencies (List[ResolvedDependency]): All recursively resolved dependencies

    Returns:
        Tuple:
            - list of (library, filepath) for VHDL
            - list of (library, filepath) for Verilog
    """
    all_vhdl_sources = []
    all_verilog_sources = []

    # Expand the root project
    vhdl_sources, verilog_sources = _expand_project_sources(root_project, ".")
    all_vhdl_sources.extend(vhdl_sources)
    all_verilog_sources.extend(verilog_sources)

    # Expand the dependencies
    for dep in resolved_dependencies:
        vhdl_dep, verilog_dep = _expand_project_sources(dep.project, dep.local_path)
        all_vhdl_sources.extend(vhdl_dep)
        all_verilog_sources.extend(verilog_dep)

    return all_vhdl_sources, all_verilog_sources
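
Finally, a minimal sketch of using the expander on its own, without any resolved dependencies; it assumes a project.yml in the current directory whose source paths use glob patterns:

    from hdlbuild.utils.project_loader import load_project_config
    from hdlbuild.utils.source_resolver import expand_all_sources

    project = load_project_config("project.yml")
    vhdl, verilog = expand_all_sources(project, resolved_dependencies=[])
    for library, path in vhdl:
        print(f"vhdl {library} {path}")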