Compare commits
66 Commits
3cf3fc1437
...
v0.7.0
| Author | SHA1 | Date | |
|---|---|---|---|
| ef8dda20d8 | |||
|
8adaa916ff
|
|||
|
af0477f8e7
|
|||
| 5d8d995a04 | |||
|
36f6e7314a
|
|||
| bc8ae46b4a | |||
|
dde2363ad7
|
|||
|
32e3b51a77
|
|||
|
da387f2ee6
|
|||
|
08679c2680
|
|||
|
7b6f9ef224
|
|||
|
6ca389d5cb
|
|||
|
1d7bc19965
|
|||
|
175bf4882a
|
|||
|
0d26c42f8a
|
|||
|
fa7e738b7e
|
|||
|
60c4ca19b1
|
|||
|
38d4c7d5f3
|
|||
|
9d33b6e8be
|
|||
|
dde3b5dc5d
|
|||
|
6d6b731033
|
|||
|
8196eb3929
|
|||
|
5083852c29
|
|||
|
215e4aa545
|
|||
|
d78bfcc408
|
|||
|
8ed550f451
|
|||
|
37b684fd2d
|
|||
|
7cf69aa16f
|
|||
|
9274461d7a
|
|||
|
b79b7559f2
|
|||
| 22266f2947 | |||
| c7b2970949 | |||
| 6271eb0998 | |||
| 308de700a2 | |||
| f1315dc458 | |||
| ba8cc2ef94 | |||
| f87b08441f | |||
| 52f5a8fe24 | |||
| 836a5bf643 | |||
| ffd8d6eac9 | |||
| 44f2f15dc5 | |||
| d6d792c9e6 | |||
| 07922f8a54 | |||
| 29d04a25f0 | |||
| bbe5d7f2b1 | |||
| 84800c25a5 | |||
| e58c0897ce | |||
| 277a16e126 | |||
| ad4ba6e412 | |||
| 2f3e0d375f | |||
| 528907fabe | |||
| fae19340cc | |||
| a57062a3f8 | |||
| cef035329a | |||
| 93e67c0380 | |||
| 8caafe862e | |||
| cfa62a5624 | |||
| 263c3fac5e | |||
| 961d182bf7 | |||
| 5263a31c07 | |||
| fb5c3127b5 | |||
| 5061ee218f | |||
| dc0d8242a8 | |||
| 28406028c6 | |||
| f9d5e3c535 | |||
| 4f1f2e7d51 |
5
.gitea/default_merge_message/MERGE_TEMPLATE.md
Normal file
5
.gitea/default_merge_message/MERGE_TEMPLATE.md
Normal file
@@ -0,0 +1,5 @@
|
||||
chore(pr): ${PullRequestTitle} ${PullRequestReference}
|
||||
|
||||
${PullRequestDescription}
|
||||
|
||||
Merged from ${HeadBranch} into ${BaseBranch}
|
||||
45
.gitea/scripts/cleanup_versions.sh
Executable file
45
.gitea/scripts/cleanup_versions.sh
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# cleanup_dev_versions.sh - Delete old PyPI dev versions from Gitea package registry
|
||||
|
||||
# Required environment variables
|
||||
USERNAME="${TWINE_USERNAME}"
|
||||
TOKEN="${TWINE_PASSWORD}"
|
||||
REPO="${GITHUB_REPOSITORY}" # e.g., maxp/repocat
|
||||
API_BASE="${GITHUB_API_URL%/}" # Strip trailing slash if present
|
||||
|
||||
OWNER="${REPO%%/*}"
|
||||
PACKAGE_NAME="${REPO##*/}"
|
||||
API_URL="${API_BASE}/packages/${OWNER}/pypi/${PACKAGE_NAME}"
|
||||
|
||||
# Fetch the list of versions
|
||||
response=$(curl -s -u "$USERNAME:$TOKEN" "$API_URL")
|
||||
|
||||
# Extract all .dev versions, sort by creation time
|
||||
mapfile -t versions_to_delete < <(echo "$response" | jq -r '
|
||||
map(select(.version | test("\\.dev"))) |
|
||||
sort_by(.created_at) |
|
||||
.[0:-1][] |
|
||||
.version')
|
||||
|
||||
# Determine latest version to keep
|
||||
latest_version=$(echo "$response" | jq -r '
|
||||
map(select(.version | test("\\.dev"))) |
|
||||
sort_by(.created_at) |
|
||||
last.version')
|
||||
|
||||
if [[ -z "$latest_version" || ${#versions_to_delete[@]} -eq 0 ]]; then
|
||||
echo "No old .dev versions to delete."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Keeping latest .dev version: $latest_version"
|
||||
|
||||
# Delete old .dev versions
|
||||
for version in "${versions_to_delete[@]}"; do
|
||||
echo "Deleting old .dev version: $version"
|
||||
curl -s -X DELETE -u "$USERNAME:$TOKEN" "$API_URL/$version"
|
||||
done
|
||||
|
||||
echo "Cleanup complete."
|
||||
21
.gitea/scripts/get-release-id.sh
Executable file
21
.gitea/scripts/get-release-id.sh
Executable file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Eingaben
|
||||
TAG="$1"
|
||||
TOKEN="${ACTIONS_RUNTIME_TOKEN:-<fallback_token>}"
|
||||
REPO="${GITHUB_REPOSITORY:-owner/example}"
|
||||
API="${GITHUB_API_URL:-https://gitea.example.tld/api/v1}"
|
||||
|
||||
OWNER=$(echo "$REPO" | cut -d/ -f1)
|
||||
NAME=$(echo "$REPO" | cut -d/ -f2)
|
||||
|
||||
RESPONSE=$(curl -sf \
|
||||
-H "Authorization: token $TOKEN" \
|
||||
"$API/repos/$OWNER/$NAME/releases/tags/$TAG")
|
||||
|
||||
RELEASE_ID=$(echo "$RESPONSE" | jq -r '.id')
|
||||
echo "Release-ID für $TAG ist: $RELEASE_ID"
|
||||
|
||||
# Für GitHub Actions als Umgebungsvariable
|
||||
echo "GT_RELEASE_ID=$RELEASE_ID" >> "$GITHUB_ENV"
|
||||
14
.gitea/scripts/set_poetry_version.sh
Executable file
14
.gitea/scripts/set_poetry_version.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
|
||||
BASE_VERSION=$(cat VERSION)
|
||||
NIGHTLY_SUFFIX=""
|
||||
|
||||
if [[ "$1" == "nightly" ]]; then
|
||||
# Beispiel: 20240511.1358 → 11. Mai, 13:58 Uhr
|
||||
NIGHTLY_SUFFIX=".dev$(date +%Y%m%d%H%M)"
|
||||
fi
|
||||
|
||||
FULL_VERSION="${BASE_VERSION}${NIGHTLY_SUFFIX}"
|
||||
|
||||
echo "Using version: $FULL_VERSION"
|
||||
poetry version "$FULL_VERSION"
|
||||
21
.gitea/scripts/sync_version_from_poetry.sh
Executable file
21
.gitea/scripts/sync_version_from_poetry.sh
Executable file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Stelle sicher, dass wir im Projektverzeichnis sind
|
||||
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||
cd "$ROOT_DIR"
|
||||
|
||||
PYPROJECT="pyproject.toml"
|
||||
VERSION_FILE="VERSION"
|
||||
|
||||
# Extrahiere die Version mit grep + sed (keine externen Abhängigkeiten nötig)
|
||||
VERSION=$(grep -E '^version\s*=' "$PYPROJECT" | head -n1 | sed -E 's/.*=\s*"([^"]+)".*/\1/')
|
||||
|
||||
if [[ -z "$VERSION" ]]; then
|
||||
echo "❌ Version konnte nicht aus $PYPROJECT gelesen werden."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
printf "%s" "$VERSION" > "$VERSION_FILE"
|
||||
echo "✅ Version synchronisiert: $VERSION → $VERSION_FILE"
|
||||
40
.gitea/scripts/upload-asset.sh
Executable file
40
.gitea/scripts/upload-asset.sh
Executable file
@@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Eingabeparameter
|
||||
FILE_PATH="$1" # z. B. ./dist/build.zip
|
||||
CUSTOM_NAME="${2:-}" # optional: anderer Name unter dem das Asset gespeichert werden soll
|
||||
RELEASE_ID="${GT_RELEASE_ID:-}" # aus Umgebung
|
||||
|
||||
# Validierung
|
||||
if [[ -z "$RELEASE_ID" ]]; then
|
||||
echo "❌ RELEASE_ID ist nicht gesetzt. Abbruch."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! -f "$FILE_PATH" ]]; then
|
||||
echo "❌ Datei '$FILE_PATH' existiert nicht. Abbruch."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Default-Konfiguration
|
||||
TOKEN="${ACTIONS_RUNTIME_TOKEN:-<fallback_token>}"
|
||||
REPO="${GITHUB_REPOSITORY:-owner/example}"
|
||||
API="${GITHUB_API_URL:-https://gitea.example.tld/api/v1}"
|
||||
|
||||
# Owner/Repo auflösen
|
||||
OWNER=$(echo "$REPO" | cut -d/ -f1)
|
||||
NAME=$(echo "$REPO" | cut -d/ -f2)
|
||||
|
||||
# Dateiname setzen
|
||||
FILENAME="${CUSTOM_NAME:-$(basename "$FILE_PATH")}"
|
||||
|
||||
echo "🔼 Uploading '$FILE_PATH' as '$FILENAME' to release ID $RELEASE_ID"
|
||||
|
||||
# Upload
|
||||
curl -sf -X POST \
|
||||
-H "Authorization: token $TOKEN" \
|
||||
-F "attachment=@$FILE_PATH" \
|
||||
"$API/repos/$OWNER/$NAME/releases/$RELEASE_ID/assets?name=$FILENAME"
|
||||
|
||||
echo "✅ Upload abgeschlossen: $FILENAME"
|
||||
62
.gitea/workflows/build-and-deploy-nightly.yml
Normal file
62
.gitea/workflows/build-and-deploy-nightly.yml
Normal file
@@ -0,0 +1,62 @@
|
||||
name: Build and Publish nightly package
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths-ignore:
|
||||
- 'CHANGELOG.md'
|
||||
|
||||
jobs:
|
||||
build-and-publish:
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 🐍 Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: 🔄 Restore cache
|
||||
uses: https://git.0xmax42.io/actions/cache@v1
|
||||
with:
|
||||
key: poetry-v1-${{ runner.os }}-${{ hashFiles('poetry.lock') }}
|
||||
paths: |
|
||||
~/.cache/pypoetry
|
||||
~/.cache/pip
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pip install poetry
|
||||
|
||||
- name: Install Project Dependencies
|
||||
working-directory: .
|
||||
run: |
|
||||
poetry install
|
||||
|
||||
- name: Set version from VERSION file (with nightly suffix)
|
||||
run: ./.gitea/scripts/set_poetry_version.sh nightly
|
||||
|
||||
- name: Build Package
|
||||
working-directory: .
|
||||
run: |
|
||||
poetry build
|
||||
|
||||
- name: Publish to Gitea Package Registry
|
||||
working-directory: .
|
||||
env:
|
||||
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
||||
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
||||
run: |
|
||||
poetry run twine upload --repository-url ${{ secrets.TWINE_URL }} dist/*
|
||||
|
||||
- name: Cleanup old dev versions
|
||||
run: |
|
||||
.gitea/scripts/cleanup_versions.sh '\.dev'
|
||||
env:
|
||||
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
||||
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
||||
64
.gitea/workflows/build-and-deploy-release.yml
Normal file
64
.gitea/workflows/build-and-deploy-release.yml
Normal file
@@ -0,0 +1,64 @@
|
||||
name: Build and Publish nightly package
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
build-and-publish:
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.release.tag_name }}
|
||||
|
||||
- name: 🐍 Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: 🔄 Restore cache
|
||||
uses: https://git.0xmax42.io/actions/cache@v1
|
||||
with:
|
||||
key: poetry-v1-${{ runner.os }}-${{ hashFiles('poetry.lock') }}
|
||||
paths: |
|
||||
~/.cache/pypoetry
|
||||
~/.cache/pip
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pip install poetry
|
||||
|
||||
- name: Install Project Dependencies
|
||||
working-directory: .
|
||||
run: |
|
||||
poetry install
|
||||
|
||||
- name: Build Package
|
||||
working-directory: .
|
||||
run: |
|
||||
poetry build
|
||||
|
||||
- name: Get built wheel filename
|
||||
id: get_whl
|
||||
run: |
|
||||
echo "whl_file=$(basename dist/*.whl)" >> $GITHUB_OUTPUT
|
||||
echo "sdist_file=$(basename dist/*.tar.gz)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish to Gitea Package Registry
|
||||
working-directory: .
|
||||
env:
|
||||
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
||||
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
||||
run: |
|
||||
poetry run twine upload --repository-url ${{ secrets.TWINE_URL }} dist/*
|
||||
|
||||
- name: Get Release ID from tag
|
||||
run: .gitea/scripts/get-release-id.sh "${{ github.event.release.tag_name }}"
|
||||
|
||||
- name: Upload assets
|
||||
run: |
|
||||
.gitea/scripts/upload-asset.sh ./dist/${{ steps.get_whl.outputs.whl_file }}
|
||||
.gitea/scripts/upload-asset.sh ./dist/${{ steps.get_whl.outputs.sdist_file }}
|
||||
18
.gitea/workflows/release.yml
Normal file
18
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
name: Auto Changelog & (Release)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Release
|
||||
uses: https://git.0xmax42.io/actions/auto-changelog-release-action@v0
|
||||
with:
|
||||
token: ${{ secrets.RELEASE_PUBLISH_TOKEN }}
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -185,3 +185,4 @@ poetry.lock
|
||||
project.yml
|
||||
.project/
|
||||
.devcontainer/
|
||||
vhdltests/
|
||||
15
.vscode/settings.json
vendored
Normal file
15
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"python.envFile": "${workspaceFolder}/.env",
|
||||
"python.analysis.extraPaths": [
|
||||
"src"
|
||||
],
|
||||
"workbench.colorCustomizations": {
|
||||
"activityBar.activeBackground": "#8dc4ff",
|
||||
"activityBar.background": "#8dc4ff",
|
||||
"activityBar.foreground": "#15202b",
|
||||
"activityBar.inactiveForeground": "#15202b99",
|
||||
"activityBarBadge.background": "#ff007b",
|
||||
"activityBarBadge.foreground": "#e7e7e7"
|
||||
},
|
||||
"peacock.color": "#5aaaff"
|
||||
}
|
||||
38
CHANGELOG.md
Normal file
38
CHANGELOG.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
## [0.7.0](https://git.0xmax42.io/maxp/hdlbuild/compare/v0.6.1..v0.7.0) - 2025-07-17
|
||||
|
||||
### 🚀 Features
|
||||
|
||||
- *(version)* Bump version to 0.7.0 - ([8adaa91](https://git.0xmax42.io/maxp/hdlbuild/commit/8adaa916ff4e736e9da707c232d6f57b788e57e8))
|
||||
- *(cli)* Add template generation commands - ([af0477f](https://git.0xmax42.io/maxp/hdlbuild/commit/af0477f8e74a9471c3e7d36877069592e41f651c))
|
||||
|
||||
## [0.6.1] - 2025-07-16
|
||||
|
||||
### 🚀 Features
|
||||
|
||||
- *(ci)* Add workflows for nightly builds and releases - ([da387f2](https://git.0xmax42.io/maxp/hdlbuild/commit/da387f2ee602390d616c79bf4057ccf941e21462))
|
||||
|
||||
### 🚜 Refactor
|
||||
|
||||
- Use typer for CLI argument parsing - ([6ca389d](https://git.0xmax42.io/maxp/hdlbuild/commit/6ca389d5cbbeff53faab9d61376a8c77ed097b6c))
|
||||
- Improves project configuration - ([175bf48](https://git.0xmax42.io/maxp/hdlbuild/commit/175bf4882a8f172ee536d726b31136690572be36))
|
||||
|
||||
### 📚 Documentation
|
||||
|
||||
- *(readme)* Expand README with detailed usage and setup - ([fa7e738](https://git.0xmax42.io/maxp/hdlbuild/commit/fa7e738b7eade5a627218741a6fb4bd1617f7801))
|
||||
|
||||
### 🎨 Styling
|
||||
|
||||
- *(pyproject)* Simplify include array formatting - ([08679c2](https://git.0xmax42.io/maxp/hdlbuild/commit/08679c2680b49119e0414688a80e8dc2659236b4))
|
||||
- Updates VS Code editor color scheme - ([1d7bc19](https://git.0xmax42.io/maxp/hdlbuild/commit/1d7bc1996522ab54970348b5118ad319849a6a1f))
|
||||
|
||||
### ⚙️ Miscellaneous Tasks
|
||||
|
||||
- *(config)* Add git-cliff configuration file - ([dde2363](https://git.0xmax42.io/maxp/hdlbuild/commit/dde2363ad7dd2fd2d48c6154e3b88c1c4a6867fd))
|
||||
- Updates project version to 0.6.0 - ([7b6f9ef](https://git.0xmax42.io/maxp/hdlbuild/commit/7b6f9ef2240864b103903e79c895a76db59c14fa))
|
||||
- Remove build-deb.yml workflow file - ([0d26c42](https://git.0xmax42.io/maxp/hdlbuild/commit/0d26c42f8ae419d509aa47d3f7a23bfdd08cf79b))
|
||||
|
||||
|
||||
168
README.md
168
README.md
@@ -1,2 +1,168 @@
|
||||
# hdlbuild
|
||||
# HDLBuild
|
||||
|
||||
HDLBuild is a flexible build management tool for FPGA projects. It simplifies the process of managing dependencies, building, testing, and deploying FPGA designs using Xilinx ISE tools.
|
||||
|
||||
## Features
|
||||
|
||||
- **Dependency Management**: Automatically resolves and manages project dependencies from Git repositories.
|
||||
- **Build Automation**: Supports synthesis, implementation, and bitstream generation for FPGA designs.
|
||||
- **Testbench Execution**: Automates the process of building and running testbenches.
|
||||
- **Customizable Tool Options**: Provides extensive configuration options for Xilinx ISE tools.
|
||||
- **Project Initialization**: Quickly set up new projects with predefined templates.
|
||||
- **Rich Console Output**: Provides detailed and interactive console feedback using `rich`.
|
||||
|
||||
---
|
||||
|
||||
## Installation
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.10 or higher
|
||||
- Poetry (for dependency management)
|
||||
- Xilinx ISE (14.7) installed and configured
|
||||
|
||||
### Steps
|
||||
|
||||
1. Clone the repository:
|
||||
```bash
|
||||
git clone https://github.com/your-repo/hdlbuild.git
|
||||
cd hdlbuild
|
||||
```
|
||||
|
||||
2. Install dependencies:
|
||||
```bash
|
||||
poetry install
|
||||
```
|
||||
|
||||
3. Add the `hdlbuild` CLI to your PATH:
|
||||
```bash
|
||||
poetry shell
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Usage
|
||||
|
||||
### CLI Commands
|
||||
|
||||
HDLBuild provides a command-line interface (CLI) for managing FPGA projects. Below are the available commands:
|
||||
|
||||
#### 1. **Initialize a New Project**
|
||||
```bash
|
||||
hdlbuild init
|
||||
```
|
||||
- Creates a new project with a project.yml configuration file and a .gitignore file.
|
||||
|
||||
#### 2. **Resolve Dependencies**
|
||||
```bash
|
||||
hdlbuild dep
|
||||
```
|
||||
- Clones and resolves all project dependencies defined in project.yml.
|
||||
|
||||
#### 3. **Build the Project**
|
||||
```bash
|
||||
hdlbuild build
|
||||
```
|
||||
- Runs the full build process, including synthesis, implementation, and bitstream generation.
|
||||
|
||||
- To only synthesize the design:
|
||||
```bash
|
||||
hdlbuild build synth
|
||||
```
|
||||
|
||||
#### 4. **Run Testbenches**
|
||||
```bash
|
||||
hdlbuild test <testbench_name>
|
||||
```
|
||||
- Builds and runs the specified testbench.
|
||||
|
||||
#### 5. **Clean Build Artifacts**
|
||||
```bash
|
||||
hdlbuild clean
|
||||
```
|
||||
- Removes build artifacts.
|
||||
|
||||
- To clean all generated files:
|
||||
```bash
|
||||
hdlbuild clean all
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Configuration
|
||||
|
||||
The project is configured using a project.yml file. Below is an example configuration:
|
||||
|
||||
```yml
|
||||
name: MyFPGAProject
|
||||
topmodule: top_module
|
||||
target_device: xc3s1200e-4-fg320
|
||||
xilinx_path: /opt/Xilinx/14.7/ISE_DS/ISE
|
||||
|
||||
constraints: constraints.ucf
|
||||
|
||||
sources:
|
||||
vhdl:
|
||||
- path: src/*.vhd
|
||||
library: work
|
||||
|
||||
testbenches:
|
||||
vhdl:
|
||||
- path: tests/*.vhd
|
||||
library: work
|
||||
|
||||
dependencies:
|
||||
- git: "https://github.com/example/dependency.git"
|
||||
rev: "main"
|
||||
|
||||
build:
|
||||
build_dir: working
|
||||
report_dir: reports
|
||||
copy_target_dir: output
|
||||
|
||||
tool_options:
|
||||
xst:
|
||||
- "-opt_mode Speed"
|
||||
- "-opt_level 2"
|
||||
map:
|
||||
- "-detail"
|
||||
- "-timing"
|
||||
par: []
|
||||
bitgen:
|
||||
- "-g StartupClk:JtagClk"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Development
|
||||
|
||||
### Building the Package
|
||||
|
||||
To build the Python package:
|
||||
```bash
|
||||
poetry build
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## GitHub Actions
|
||||
|
||||
The project includes GitHub workflows for building and deploying the package:
|
||||
|
||||
1. **Build and Publish**: build-and-deploy.yml
|
||||
|
||||
---
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the MIT License.
|
||||
|
||||
---
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome! Please follow these steps:
|
||||
|
||||
1. Fork the repository.
|
||||
2. Create a new branch for your feature or bugfix.
|
||||
3. Submit a pull request.
|
||||
|
||||
104
cliff.toml
Normal file
104
cliff.toml
Normal file
@@ -0,0 +1,104 @@
|
||||
# CLIFF_VERSION=2.8.0
|
||||
# git-cliff ~ default configuration file
|
||||
# https://git-cliff.org/docs/configuration
|
||||
#
|
||||
# Lines starting with "#" are comments.
|
||||
# Configuration options are organized into tables and keys.
|
||||
# See documentation for more information on available options.
|
||||
[remote.gitea]
|
||||
owner = "maxp"
|
||||
repo = "hdlbuild"
|
||||
|
||||
[changelog]
|
||||
# postprocessors
|
||||
postprocessors = [
|
||||
{ pattern = '<GITEA_URL>', replace = "https://git.0xmax42.io" }, # replace gitea url
|
||||
]
|
||||
|
||||
# template for the changelog header
|
||||
header = """
|
||||
# Changelog\n
|
||||
All notable changes to this project will be documented in this file.\n
|
||||
"""
|
||||
# template for the changelog body
|
||||
# https://keats.github.io/tera/docs/#introduction
|
||||
body = """
|
||||
{%- macro remote_url() -%}
|
||||
<GITEA_URL>/{{ remote.gitea.owner }}/{{ remote.gitea.repo }}
|
||||
{%- endmacro -%}
|
||||
|
||||
{% if version %}\
|
||||
{% if previous.version %}\
|
||||
## [{{ version | trim_start_matches(pat="v") }}]\
|
||||
({{ self::remote_url() }}/compare/{{ previous.version }}..{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||
{% else %}\
|
||||
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||
{% endif %}\
|
||||
{% else %}\
|
||||
## [unreleased]
|
||||
{% endif %}\
|
||||
{% for group, commits in commits | group_by(attribute="group") %}
|
||||
### {{ group | striptags | trim | upper_first }}
|
||||
{% for commit in commits %}
|
||||
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
|
||||
{% if commit.breaking %}[**breaking**] {% endif %}\
|
||||
{{ commit.message | upper_first }} - \
|
||||
([{{ commit.id | truncate(length=7, end="") }}]({{ self::remote_url() }}/commit/{{ commit.id }}))\
|
||||
{% endfor %}
|
||||
{% endfor %}\n
|
||||
"""
|
||||
# template for the changelog footer
|
||||
footer = """
|
||||
|
||||
"""
|
||||
# remove the leading and trailing s
|
||||
trim = true
|
||||
|
||||
# render body even when there are no releases to process
|
||||
# render_always = true
|
||||
# output file path
|
||||
# output = "test.md"
|
||||
|
||||
[git]
|
||||
# parse the commits based on https://www.conventionalcommits.org
|
||||
conventional_commits = true
|
||||
# filter out the commits that are not conventional
|
||||
filter_unconventional = true
|
||||
# process each line of a commit as an individual commit
|
||||
split_commits = false
|
||||
# regex for preprocessing the commit messages
|
||||
commit_preprocessors = [
|
||||
# Replace issue numbers
|
||||
#{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"},
|
||||
# Check spelling of the commit with https://github.com/crate-ci/typos
|
||||
# If the spelling is incorrect, it will be automatically fixed.
|
||||
#{ pattern = '.*', replace_command = 'typos --write-changes -' },
|
||||
]
|
||||
# regex for parsing and grouping commits
|
||||
commit_parsers = [
|
||||
{ message = "^feat", group = "<!-- 0 -->🚀 Features" },
|
||||
{ message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
|
||||
{ message = "^doc", group = "<!-- 3 -->📚 Documentation" },
|
||||
{ message = "^perf", group = "<!-- 4 -->⚡ Performance" },
|
||||
{ message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
|
||||
{ message = "^style", group = "<!-- 5 -->🎨 Styling" },
|
||||
{ message = "^test", group = "<!-- 6 -->🧪 Testing" },
|
||||
{ message = "^chore\\(changelog\\)", skip = true },
|
||||
{ message = "^chore\\(version\\)", skip = true },
|
||||
{ message = "^chore\\(release\\): prepare for", skip = true },
|
||||
{ message = "^chore\\(deps.*\\)", skip = true },
|
||||
{ message = "^chore\\(pr\\)", skip = true },
|
||||
{ message = "^chore\\(pull\\)", skip = true },
|
||||
{ message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
|
||||
{ body = ".*security", group = "<!-- 8 -->🛡️ Security" },
|
||||
{ message = "^revert", group = "<!-- 9 -->◀️ Revert" },
|
||||
{ message = ".*", group = "<!-- 10 -->💼 Other" },
|
||||
]
|
||||
# Regex to select git tags that represent releases.
|
||||
tag_pattern = "v[0-9]+\\.[0-9]+\\.[0-9]+"
|
||||
# filter out the commits that are not matched by commit parsers
|
||||
filter_commits = false
|
||||
# sort the tags topologically
|
||||
topo_order = false
|
||||
# sort the commits inside sections by oldest/newest order
|
||||
sort_commits = "newest"
|
||||
@@ -1,41 +1,268 @@
|
||||
name: VGA_Test
|
||||
topmodule: VGA_Test_Top
|
||||
name:
|
||||
topmodule:
|
||||
target_device: xc3s1200e-4-fg320
|
||||
xilinx_path: /opt/Xilinx/14.7/ISE_DS/ISE
|
||||
|
||||
constraints:
|
||||
|
||||
sources:
|
||||
vhdl:
|
||||
- path: src/vga/*.vhd
|
||||
- path: src/*.vhd
|
||||
library: work
|
||||
- path: src/common/*.vhd
|
||||
library: work
|
||||
- path: src/VGA_test_top.vhd
|
||||
library: work
|
||||
|
||||
verilog:
|
||||
- path: src/old_modules/*.v
|
||||
library: work
|
||||
|
||||
dependencies:
|
||||
- name: AsyncFIFO
|
||||
git: "https://github.com/0xMax32/Asynchronous-FIFO-AXI-Handshake.git"
|
||||
rev: "main"
|
||||
library: asyncfifo
|
||||
- name: GrayCounter
|
||||
git: "https://github.com/0xMax32/Gray-Counter.git"
|
||||
rev: "v1.0.0"
|
||||
library: graycounter
|
||||
|
||||
testbenches:
|
||||
vhdl:
|
||||
- path: src/tests/*.vhd
|
||||
- path: tests/*.vhd
|
||||
library: work
|
||||
|
||||
verilog: []
|
||||
|
||||
constraints: constraints/VGA_Test.ucf
|
||||
dependencies:
|
||||
# - git: "https://git.0xmax42.io/maxp/Asynchronous-FIFO-AXI-Handshake.git"
|
||||
# rev: "hdlbuild"
|
||||
|
||||
build:
|
||||
build_dir: working
|
||||
report_dir: reports
|
||||
copy_target_dir: output
|
||||
|
||||
# Tool Optionen
|
||||
tool_options:
|
||||
common:
|
||||
- "-intstyle"
|
||||
- "xflow"
|
||||
|
||||
ngdbuild: []
|
||||
|
||||
map:
|
||||
- "-detail"
|
||||
- "-timing"
|
||||
- "-ol"
|
||||
- "high"
|
||||
|
||||
par: []
|
||||
|
||||
bitgen:
|
||||
- "-g"
|
||||
- "StartupClk:JtagClk"
|
||||
|
||||
trace:
|
||||
- "-v"
|
||||
- "3"
|
||||
- "-n"
|
||||
- "3"
|
||||
|
||||
fuse:
|
||||
- "-incremental"
|
||||
|
||||
isim:
|
||||
- "-gui"
|
||||
|
||||
xst:
|
||||
# Optimization goal: prioritize speed or area.
|
||||
# Values: Speed | Area
|
||||
- "-opt_mode Speed"
|
||||
|
||||
# Optimization level: more aggressive optimizations at level 2.
|
||||
# Values: 1 | 2
|
||||
- "-opt_level 2"
|
||||
|
||||
# Use the new XST parser (recommended for modern designs).
|
||||
# Values: yes | no
|
||||
- "-use_new_parser yes"
|
||||
|
||||
# Preserve design hierarchy or allow flattening for optimization.
|
||||
# Values: Yes | No | Soft
|
||||
- "-keep_hierarchy No"
|
||||
|
||||
# Determines how hierarchy is preserved in the netlist.
|
||||
# Values: As_Optimized | Rebuilt
|
||||
- "-netlist_hierarchy As_Optimized"
|
||||
|
||||
# Global optimization strategy for nets.
|
||||
# Values: AllClockNets | Offset_In_Before | Offset_Out_After | Inpad_To_Outpad | Max_Delay
|
||||
- "-glob_opt AllClockNets"
|
||||
|
||||
## Misc ##
|
||||
|
||||
# Enable reading of IP cores.
|
||||
# Values: YES | NO
|
||||
- "-read_cores YES"
|
||||
|
||||
# Do not write timing constraints into synthesis report.
|
||||
# Values: YES | NO
|
||||
- "-write_timing_constraints NO"
|
||||
|
||||
# Analyze paths across different clock domains.
|
||||
# Values: YES | NO
|
||||
- "-cross_clock_analysis NO"
|
||||
|
||||
# Character used to separate hierarchy levels in instance names.
|
||||
# Default: /
|
||||
- "-hierarchy_separator /"
|
||||
|
||||
# Delimiters used for bus signals.
|
||||
# Values: <> | [] | () | {}
|
||||
- "-bus_delimiter <>"
|
||||
|
||||
# Maintain original case of identifiers.
|
||||
# Values: Maintain | Upper | Lower
|
||||
- "-case Maintain"
|
||||
|
||||
# Target maximum utilization ratio for slices.
|
||||
# Values: 1–100
|
||||
- "-slice_utilization_ratio 100"
|
||||
|
||||
# Target maximum utilization ratio for BRAMs.
|
||||
# Values: 1–100
|
||||
- "-bram_utilization_ratio 100"
|
||||
|
||||
# Use Verilog 2001 syntax features.
|
||||
# Values: YES | NO
|
||||
- "-verilog2001 YES"
|
||||
|
||||
#### HDL Options ####
|
||||
|
||||
## FSM ##
|
||||
|
||||
# Extract FSMs (Finite State Machines) from HDL code.
|
||||
# Values: YES | NO
|
||||
- "-fsm_extract YES"
|
||||
|
||||
# Encoding strategy for FSMs.
|
||||
# Values: Auto | Gray | One-Hot | Johnson | Compact | Sequential | Speed1 | User
|
||||
- "-fsm_encoding Auto"
|
||||
|
||||
# Add safe logic for undefined FSM states.
|
||||
# Values: Yes | No
|
||||
- "-safe_implementation No"
|
||||
|
||||
# Structure used to implement FSMs.
|
||||
# Values: LUT | BRAM
|
||||
- "-fsm_style LUT"
|
||||
|
||||
## RAM/ROM ##
|
||||
|
||||
# Extract RAM inference from HDL.
|
||||
# Values: Yes | No
|
||||
- "-ram_extract Yes"
|
||||
|
||||
# Style used to implement RAM.
|
||||
# Values: Auto | Block | Distributed
|
||||
- "-ram_style Auto"
|
||||
|
||||
# Extract ROM inference from HDL.
|
||||
# Values: Yes | No
|
||||
- "-rom_extract Yes"
|
||||
|
||||
# Style used for implementing ROM.
|
||||
# Values: Auto | Distributed | Block
|
||||
- "-rom_style Auto"
|
||||
|
||||
# Enable or disable automatic BRAM packing.
|
||||
# Values: YES | NO
|
||||
- "-auto_bram_packing NO"
|
||||
|
||||
## MUX/Decoder/Shift Register ##
|
||||
|
||||
# Extract multiplexers where possible.
|
||||
# Values: Yes | No | Force
|
||||
- "-mux_extract Yes"
|
||||
|
||||
# Style used for implementing MUX logic.
|
||||
# Values: Auto | MUXCY | MUXF
|
||||
- "-mux_style Auto"
|
||||
|
||||
# Extract decoder logic from behavioral code.
|
||||
# Values: YES | NO
|
||||
- "-decoder_extract YES"
|
||||
|
||||
# Extract and optimize priority encoder structures.
|
||||
# Values: Yes | No | Force
|
||||
- "-priority_extract Yes"
|
||||
|
||||
# Extract shift register logic.
|
||||
# Values: YES | NO
|
||||
- "-shreg_extract YES"
|
||||
|
||||
# Extract simple shift operations into dedicated hardware.
|
||||
# Values: YES | NO
|
||||
- "-shift_extract YES"
|
||||
|
||||
## Multiplier ##
|
||||
|
||||
# Style for implementing multipliers.
|
||||
# Values: Auto | LUT | Pipe_LUT | Pipe_Block | Block
|
||||
- "-mult_style Auto"
|
||||
|
||||
## Misc ##
|
||||
|
||||
# Collapse XOR trees where beneficial.
|
||||
# Values: YES | NO
|
||||
- "-xor_collapse YES"
|
||||
|
||||
# Share resources like adders or multipliers between logic blocks.
|
||||
# Values: YES | NO | Force
|
||||
- "-resource_sharing YES"
|
||||
|
||||
# Convert asynchronous resets to synchronous where possible.
|
||||
# Values: YES | NO
|
||||
- "-async_to_sync NO"
|
||||
|
||||
#### Xilinx Specific Options ####
|
||||
|
||||
## Optimization ##
|
||||
|
||||
# Enable removal of logically equivalent registers.
|
||||
# Values: YES | NO
|
||||
- "-equivalent_register_removal YES"
|
||||
|
||||
# Duplicate registers to reduce fanout or improve timing.
|
||||
# Values: YES | NO
|
||||
- "-register_duplication YES"
|
||||
|
||||
# Move registers across logic to balance timing.
|
||||
# Values: Yes | No | Forward | Backward
|
||||
- "-register_balancing No"
|
||||
|
||||
# Use clock enable signals where possible.
|
||||
# Values: Auto | Yes | No
|
||||
- "-use_clock_enable Yes"
|
||||
|
||||
# Use synchronous set (preset) signals when available.
|
||||
# Values: Auto | Yes | No
|
||||
- "-use_sync_set Yes"
|
||||
|
||||
# Use synchronous reset signals where possible.
|
||||
# Values: Auto | Yes | No
|
||||
- "-use_sync_reset Yes"
|
||||
|
||||
## I/O ##
|
||||
|
||||
# Insert IO buffers for top-level ports.
|
||||
# Values: YES | NO
|
||||
- "-iobuf YES"
|
||||
|
||||
# Placement strategy for IOB registers (Auto = let tools decide).
|
||||
# Values: Auto | YES | NO
|
||||
- "-iob Auto"
|
||||
|
||||
## Misc ##
|
||||
|
||||
# Maximum allowed fanout for a net.
|
||||
# Values: integer (e.g., 500)
|
||||
- "-max_fanout 500"
|
||||
|
||||
# Maximum number of BUFGs (global buffers) to use.
|
||||
# Values: 0–32 (device-dependent)
|
||||
- "-bufg 24"
|
||||
|
||||
# Enable logic packing into slices.
|
||||
# Values: YES | NO
|
||||
- "-slice_packing YES"
|
||||
|
||||
# Try to reduce the number of primitive instances used.
|
||||
# Values: YES | NO
|
||||
- "-optimize_primitives NO"
|
||||
|
||||
# Margin in percent beyond the target slice utilization.
|
||||
# Values: 0–100
|
||||
- "-slice_utilization_ratio_maxmargin 5"
|
||||
@@ -1,18 +1,27 @@
|
||||
[tool.poetry]
|
||||
name = "hdlbuild"
|
||||
version = "0.1.0"
|
||||
version = "0.7.0"
|
||||
description = "Flexible FPGA Build System"
|
||||
authors = ["0xMax42 <Mail@0xMax42.io>"]
|
||||
license = "MIT"
|
||||
readme = "README.md"
|
||||
packages = [{ include = "hdlbuild", from = "src" }]
|
||||
include = ["src/hdlbuild/templates/*"]
|
||||
|
||||
[tool.poetry.scripts]
|
||||
hdlbuild = "hdlbuild.cli:main"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.10"
|
||||
pyyaml = "^6.0.2"
|
||||
doit = "^0.36.0"
|
||||
pydantic = "^2.11.3"
|
||||
rich = "^14.0.0"
|
||||
gitpython = "^3.1.44"
|
||||
typer = "^0.16.0"
|
||||
jinja2 = "^3.1.6"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
twine = "^6.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
from tools.xilinx_ise.main import xilinx_ise_all, xilinx_ise_synth
|
||||
from utils.console_utils import ConsoleUtils
|
||||
from utils.directory_manager import clear_build_directories, clear_directories, ensure_directories_exist
|
||||
from utils.project_loader import load_project_config
|
||||
|
||||
project = load_project_config()
|
||||
console_utils = ConsoleUtils("hdlbuild")
|
||||
|
||||
def clear(args):
|
||||
"""Clears the build artifacts."""
|
||||
if args.target == "all":
|
||||
console_utils.print("Starting clear all process...")
|
||||
clear_directories()
|
||||
console_utils.print("All cleared.")
|
||||
else:
|
||||
console_utils.print("Clearing build artifacts...")
|
||||
clear_build_directories()
|
||||
console_utils.print("Build artifacts cleared.")
|
||||
|
||||
def build(args):
|
||||
"""Starts the build process."""
|
||||
console_utils.print("Starting build process...")
|
||||
ensure_directories_exist(True)
|
||||
xilinx_ise_all(project)
|
||||
|
||||
def synth(args):
|
||||
"""Starts the build process."""
|
||||
console_utils.print("Starting build process...")
|
||||
ensure_directories_exist()
|
||||
xilinx_ise_synth(project)
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="hdlbuild - Build management tool for FPGA projects",
|
||||
formatter_class=argparse.RawTextHelpFormatter
|
||||
)
|
||||
|
||||
subparsers = parser.add_subparsers(
|
||||
title="Commands",
|
||||
description="Available commands",
|
||||
dest="command",
|
||||
required=True
|
||||
)
|
||||
|
||||
# Clear command
|
||||
parser_clear = subparsers.add_parser("clear", help="Clear build artifacts")
|
||||
parser_clear.add_argument(
|
||||
"target",
|
||||
nargs="?",
|
||||
choices=["all"],
|
||||
help="Specify 'all' to clear everything (optional)"
|
||||
)
|
||||
parser_clear.set_defaults(func=clear)
|
||||
|
||||
# Build command
|
||||
parser_build = subparsers.add_parser("build", help="Start the build process")
|
||||
parser_build.set_defaults(func=build)
|
||||
|
||||
# Synth command
|
||||
parser_build = subparsers.add_parser("synth", help="Start the synth process")
|
||||
parser_build.set_defaults(func=synth)
|
||||
|
||||
args = parser.parse_args()
|
||||
args.func(args)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
0
src/hdlbuild/__init__.py
Normal file
0
src/hdlbuild/__init__.py
Normal file
33
src/hdlbuild/cli.py
Normal file
33
src/hdlbuild/cli.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import typer
|
||||
from importlib.metadata import version, PackageNotFoundError
|
||||
|
||||
from hdlbuild.commands.gen import cli as gen_cli
|
||||
from hdlbuild.commands.build import cli as build_cli
|
||||
from hdlbuild.commands.clean import cli as clean_cli
|
||||
from hdlbuild.commands.dep import cli as dep_cli
|
||||
from hdlbuild.commands.test import cli as test_cli
|
||||
from hdlbuild.commands.init import cli as init_cli
|
||||
|
||||
def get_version() -> str:
|
||||
try:
|
||||
return version("hdlbuild")
|
||||
except PackageNotFoundError:
|
||||
return "unknown"
|
||||
|
||||
app = typer.Typer(
|
||||
rich_help_panel="ℹ️ HDLBuild – FPGA‑Build‑Tool",
|
||||
help=f"hdlbuild v{get_version()} – Build‑Management for FPGA projects"
|
||||
)
|
||||
|
||||
app.add_typer(build_cli, name="build", help="Build the project")
|
||||
app.add_typer(clean_cli, name="clean", help="Clean build artifacts")
|
||||
app.add_typer(dep_cli, name="dep", help="Resolve dependencies")
|
||||
app.add_typer(test_cli, name="test", help="Run simulations/testbenches")
|
||||
app.add_typer(init_cli, name="init", help="Initialize project")
|
||||
app.add_typer(gen_cli, name="gen", help="Generate HDL files from templates")
|
||||
|
||||
def main():
|
||||
app()
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
34
src/hdlbuild/commands/build.py
Normal file
34
src/hdlbuild/commands/build.py
Normal file
@@ -0,0 +1,34 @@
|
||||
import typer
|
||||
|
||||
from hdlbuild.tools.xilinx_ise.main import xilinx_ise_all, xilinx_ise_synth
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
from hdlbuild.utils.directory_manager import ensure_directories_exist
|
||||
from hdlbuild.utils.project_loader import load_project_config
|
||||
|
||||
cli = typer.Typer(rich_help_panel="🔨 Build Commands")
|
||||
|
||||
@cli.callback(invoke_without_command=True)
|
||||
def build(
|
||||
target: str = typer.Argument(
|
||||
None,
|
||||
help="Optional: 'synth' to run synthesis only",
|
||||
show_default=False,
|
||||
rich_help_panel="🔨 Build Commands",
|
||||
)
|
||||
) -> None:
|
||||
"""
|
||||
Run the full build flow or synthesis only.
|
||||
|
||||
* `hdlbuild build` → full flow
|
||||
* `hdlbuild build synth` → synthesis only
|
||||
"""
|
||||
console = ConsoleUtils("hdlbuild")
|
||||
project = load_project_config()
|
||||
|
||||
ensure_directories_exist(True)
|
||||
if target == "synth":
|
||||
console.print("Starting synthesis …")
|
||||
xilinx_ise_synth(project)
|
||||
else:
|
||||
console.print("Starting full build …")
|
||||
xilinx_ise_all(project)
|
||||
35
src/hdlbuild/commands/clean.py
Normal file
35
src/hdlbuild/commands/clean.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import typer
|
||||
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
from hdlbuild.utils.directory_manager import clear_build_directories, clear_directories
|
||||
|
||||
cli = typer.Typer(rich_help_panel="🧹 Clean Commands")
|
||||
|
||||
@cli.callback(invoke_without_command=True)
|
||||
def clean(
|
||||
target: str = typer.Argument(
|
||||
None,
|
||||
help="Optional: 'all' → wipe *all* artefacts, otherwise only the build directory",
|
||||
show_default=False,
|
||||
)
|
||||
) -> None:
|
||||
"""
|
||||
Remove build artefacts (`build/*`) or *everything* (`all`).
|
||||
|
||||
Examples
|
||||
--------
|
||||
```bash
|
||||
hdlbuild clean # build/* and temporary files only
|
||||
hdlbuild clean all # also caches, logs, etc.
|
||||
```
|
||||
"""
|
||||
console = ConsoleUtils("hdlbuild")
|
||||
|
||||
if target == "all":
|
||||
console.print("Starting clean‑all …")
|
||||
clear_directories()
|
||||
console.print("All artefacts removed.")
|
||||
else:
|
||||
console.print("Removing build artefacts …")
|
||||
clear_build_directories()
|
||||
console.print("Build artefacts removed.")
|
||||
23
src/hdlbuild/commands/dep.py
Normal file
23
src/hdlbuild/commands/dep.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import typer
|
||||
|
||||
from hdlbuild.dependencies.resolver import DependencyResolver
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
from hdlbuild.utils.project_loader import load_project_config
|
||||
|
||||
cli = typer.Typer(rich_help_panel="🔗 Dependency Commands")
|
||||
|
||||
@cli.callback(invoke_without_command=True)
|
||||
def dep() -> None:
|
||||
"""
|
||||
Resolve all project dependencies.
|
||||
|
||||
```bash
|
||||
hdlbuild dep
|
||||
```
|
||||
"""
|
||||
console = ConsoleUtils("hdlbuild")
|
||||
project = load_project_config()
|
||||
|
||||
console.print("Resolving dependencies …")
|
||||
DependencyResolver(project).resolve_all()
|
||||
console.print("Dependencies resolved.")
|
||||
53
src/hdlbuild/commands/gen.py
Normal file
53
src/hdlbuild/commands/gen.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import typer
|
||||
|
||||
from hdlbuild.generate.template_generator import TemplateGenerator
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
from hdlbuild.utils.project_loader import load_project_config
|
||||
|
||||
cli = typer.Typer(rich_help_panel="🧬 Template Commands")
|
||||
|
||||
@cli.command("list")
|
||||
def list_templates() -> None:
|
||||
"""
|
||||
List all available template names from *project.yml*.
|
||||
|
||||
```bash
|
||||
hdlbuild gen list
|
||||
```
|
||||
"""
|
||||
console = ConsoleUtils("hdlbuild")
|
||||
project = load_project_config()
|
||||
TemplateGenerator.list_templates(project, console)
|
||||
|
||||
|
||||
@cli.callback(invoke_without_command=True)
|
||||
def gen(
|
||||
ctx: typer.Context,
|
||||
name: str = typer.Option(
|
||||
None,
|
||||
"--name",
|
||||
"-n",
|
||||
help="Name of the template to generate (from project.yml)",
|
||||
show_default=False,
|
||||
),
|
||||
dry_run: bool = typer.Option(
|
||||
False,
|
||||
"--dry-run",
|
||||
help="Only show the output without writing file",
|
||||
),
|
||||
) -> None:
|
||||
"""
|
||||
Render HDL files from Jinja2 templates.
|
||||
|
||||
* `hdlbuild gen` → render all templates
|
||||
* `hdlbuild gen <name>` → render a specific template
|
||||
* `hdlbuild gen <name> --dry-run` → only show output without saving
|
||||
"""
|
||||
console = ConsoleUtils("hdlbuild")
|
||||
project = load_project_config()
|
||||
|
||||
# Only executed when no subcommand (e.g., "list") is active.
|
||||
if ctx.invoked_subcommand is None:
|
||||
TemplateGenerator.generate(project, name, dry_run, console)
|
||||
35
src/hdlbuild/commands/init.py
Normal file
35
src/hdlbuild/commands/init.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
import typer
|
||||
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
|
||||
cli = typer.Typer(rich_help_panel="🆕 Init Commands")
|
||||
|
||||
@cli.callback(invoke_without_command=True)
|
||||
def init() -> None:
|
||||
"""
|
||||
Initialise a new HDLBuild project in the current directory.
|
||||
|
||||
Copies `.gitignore` and `project.yml` from the template folder.
|
||||
"""
|
||||
console = ConsoleUtils("hdlbuild")
|
||||
project_dir = Path.cwd()
|
||||
|
||||
script_dir = Path(__file__).parent.resolve()
|
||||
template_dir = (script_dir / ".." / "templates").resolve()
|
||||
|
||||
files = [
|
||||
("gitignore.template", ".gitignore"),
|
||||
("project.yml.template", "project.yml"),
|
||||
]
|
||||
|
||||
for template_name, target_name in files:
|
||||
template_path = template_dir / template_name
|
||||
target_path = project_dir / target_name
|
||||
|
||||
if not target_path.exists():
|
||||
shutil.copy(template_path, target_path)
|
||||
console.print(f"Created {target_name}")
|
||||
else:
|
||||
console.print(f"{target_name} already exists – skipping.")
|
||||
31
src/hdlbuild/commands/test.py
Normal file
31
src/hdlbuild/commands/test.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import typer
|
||||
|
||||
from hdlbuild.tools.xilinx_ise.isim import build_testbench, run_testbench
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
from hdlbuild.utils.project_loader import load_project_config
|
||||
|
||||
cli = typer.Typer(rich_help_panel="🧪 Test Commands")
|
||||
|
||||
@cli.callback(invoke_without_command=True)
|
||||
def test(
|
||||
target: str = typer.Argument(
|
||||
None,
|
||||
help="Name of the test target (leave empty to run all)",
|
||||
show_default=False,
|
||||
)
|
||||
) -> None:
|
||||
"""
|
||||
Build and run testbenches.
|
||||
|
||||
```bash
|
||||
hdlbuild test # run all TBs
|
||||
hdlbuild test alu # run TB 'alu' only
|
||||
```
|
||||
"""
|
||||
console = ConsoleUtils("hdlbuild")
|
||||
project = load_project_config()
|
||||
|
||||
console.print("Starting test flow …")
|
||||
build_testbench(project, target)
|
||||
run_testbench(project, target)
|
||||
console.print("Tests finished.")
|
||||
99
src/hdlbuild/dependencies/resolver.py
Normal file
99
src/hdlbuild/dependencies/resolver.py
Normal file
@@ -0,0 +1,99 @@
|
||||
# src/hdlbuild/dependency/resolver.py
|
||||
|
||||
from typing import List, Set
|
||||
|
||||
from git import Repo
|
||||
from hdlbuild.models.config import DIRECTORIES, GIT
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.dependency import ResolvedDependency
|
||||
import os
|
||||
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
from hdlbuild.utils.project_loader import load_project_config
|
||||
|
||||
class DependencyResolver:
|
||||
def __init__(self, root_project: ProjectConfig, offline_mode: bool = False):
|
||||
self.root_project = root_project
|
||||
self.offline_mode = offline_mode
|
||||
self.resolved: List[ResolvedDependency] = []
|
||||
self.visited_urls: Set[str] = set()
|
||||
self.console = ConsoleUtils(live=True)
|
||||
self.console.start_live()
|
||||
|
||||
def resolve_all(self):
|
||||
"""Startet das Auflösen aller Abhängigkeiten (rekursiv)."""
|
||||
self._resolve_project(self.root_project)
|
||||
self.console.stop_live("[bold green]Alle Abhängigkeiten aufgelöst.[/bold green]")
|
||||
|
||||
def _resolve_project(self, project: ProjectConfig):
|
||||
"""Löst die Abhängigkeiten eines einzelnen Projekts auf."""
|
||||
for dep in project.dependencies or []:
|
||||
if dep.git in self.visited_urls:
|
||||
continue
|
||||
|
||||
self.visited_urls.add(dep.git)
|
||||
|
||||
local_path = self._clone_or_use_existing(dep.git, dep.rev)
|
||||
dep_project = self._load_project_config(os.path.join(local_path, "project.yml"))
|
||||
|
||||
# Speichern als ResolvedDependency
|
||||
self.resolved.append(ResolvedDependency(project=dep_project, local_path=local_path))
|
||||
|
||||
self._resolve_project(dep_project)
|
||||
|
||||
def _clone_or_use_existing(self, git_url: str, rev: str) -> str:
|
||||
folder_name = os.path.basename(git_url.rstrip("/")).replace(".git", "")
|
||||
local_path = os.path.join(DIRECTORIES.dependency, folder_name)
|
||||
|
||||
if os.path.exists(local_path):
|
||||
# Lokales Repo vorhanden
|
||||
self.console.print(f"[bold green]Benutze vorhandenes Repository: {folder_name}[/bold green]")
|
||||
repo = Repo(local_path)
|
||||
|
||||
if not self.offline_mode:
|
||||
try:
|
||||
self.console.print(f"[bold green]Aktualisiere {folder_name}...[/bold green]")
|
||||
|
||||
# Fetch Remote Updates
|
||||
repo.remotes.origin.fetch()
|
||||
|
||||
# Prüfen, ob HEAD und origin/branch unterschiedlich sind
|
||||
local_commit = repo.head.commit
|
||||
remote_ref = repo.remotes.origin.refs[repo.active_branch.name]
|
||||
remote_commit = remote_ref.commit
|
||||
|
||||
if local_commit.hexsha != remote_commit.hexsha:
|
||||
self.console.print(f"[bold yellow]Änderungen erkannt! Force-Pull wird durchgeführt...[/bold yellow]")
|
||||
repo.git.reset('--hard', remote_commit.hexsha)
|
||||
else:
|
||||
self.console.print(f"[bold green]Repository {folder_name} ist aktuell.[/bold green]")
|
||||
|
||||
except Exception as e:
|
||||
self.console.print(f"[bold red]Warnung beim Aktualisieren: {e}[/bold red]")
|
||||
|
||||
else:
|
||||
# Lokales Repo fehlt → nur dann klonen
|
||||
if self.offline_mode:
|
||||
raise FileNotFoundError(f"Repository {folder_name} existiert lokal nicht und offline_mode ist aktiv.")
|
||||
else:
|
||||
self.console.print(f"[bold green]Klone {git_url}...[/bold green]")
|
||||
repo = Repo.clone_from(git_url, local_path)
|
||||
|
||||
# Immer: Auf den richtigen Commit/Branch wechseln
|
||||
self.console.print(f"[bold green]Checkout auf[/bold green] [yellow]{rev}[/yellow] in {folder_name}")
|
||||
repo.git.checkout(rev)
|
||||
|
||||
return local_path
|
||||
|
||||
def _load_project_config(self, path: str) -> ProjectConfig:
|
||||
"""
|
||||
Lädt eine project.yml aus einem lokalen Ordner.
|
||||
|
||||
Args:
|
||||
path (str): Basisverzeichnis des geklonten Projekts.
|
||||
|
||||
Returns:
|
||||
ProjectConfig: Das geladene Projekt.
|
||||
"""
|
||||
self.console.print(f"Lade project.yml aus {path}...")
|
||||
return load_project_config(path)
|
||||
120
src/hdlbuild/generate/template_generator.py
Normal file
120
src/hdlbuild/generate/template_generator.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""
|
||||
hdlbuild.generate.template_generator
|
||||
====================================
|
||||
|
||||
Enthält die Klasse :class:`TemplateGenerator`, die das Auflisten und Rendern
|
||||
von in *project.yml* definierten Jinja2-Templates kapselt.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Optional
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from hdlbuild.models.templates import TemplateInstance
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
|
||||
|
||||
class TemplateGenerator:
|
||||
"""
|
||||
Hilfsklasse zum Auflisten und Rendern der im Projekt konfigurierten
|
||||
Jinja2-Templates.
|
||||
"""
|
||||
|
||||
# --------------------------------------------------------------------- #
|
||||
# Öffentliche API
|
||||
# --------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def list_templates(project, console: ConsoleUtils) -> None:
|
||||
"""
|
||||
Alle in *project.yml* definierten Templates auflisten.
|
||||
"""
|
||||
if not project.templates:
|
||||
console.print("[yellow]No templates defined in project.yml")
|
||||
return
|
||||
|
||||
console.print("[bold underline]Available Templates:")
|
||||
for name in project.templates.root.keys():
|
||||
console.print(f"• {name}")
|
||||
|
||||
@classmethod
|
||||
def generate(
|
||||
cls,
|
||||
project,
|
||||
name: Optional[str],
|
||||
dry_run: bool,
|
||||
console: ConsoleUtils,
|
||||
) -> None:
|
||||
"""
|
||||
Templates erzeugen.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
project
|
||||
Geladenes Projekt-Model.
|
||||
name
|
||||
Name eines einzelnen Templates oder *None*, um alle Templates
|
||||
zu erzeugen.
|
||||
dry_run
|
||||
Wenn *True*, wird das gerenderte Ergebnis nur ausgegeben,
|
||||
jedoch nicht auf die Festplatte geschrieben.
|
||||
console
|
||||
Farbige Konsolen-Ausgaben.
|
||||
"""
|
||||
if not project.templates:
|
||||
console.print("[red]No templates defined in project.yml")
|
||||
return
|
||||
|
||||
templates = project.templates.root
|
||||
|
||||
if name:
|
||||
# Ein bestimmtes Template
|
||||
if name not in templates:
|
||||
console.print(f"[red]Template '{name}' not found.")
|
||||
return
|
||||
cls._render_template(name, templates[name], dry_run, console)
|
||||
else:
|
||||
# Alle Templates durchlaufen
|
||||
for tname, template in templates.items():
|
||||
cls._render_template(tname, template, dry_run, console)
|
||||
|
||||
# --------------------------------------------------------------------- #
|
||||
# Interne Helfer
|
||||
# --------------------------------------------------------------------- #
|
||||
|
||||
@staticmethod
|
||||
def _render_template(
|
||||
name: str,
|
||||
template: TemplateInstance,
|
||||
dry_run: bool,
|
||||
console: ConsoleUtils,
|
||||
) -> None:
|
||||
"""
|
||||
Einzelnes Template rendern und wahlweise speichern.
|
||||
"""
|
||||
template_path = template.template
|
||||
output_path = template.output
|
||||
variables = template.variables
|
||||
|
||||
env = Environment(
|
||||
loader=FileSystemLoader(os.path.dirname(template_path)),
|
||||
trim_blocks=True,
|
||||
lstrip_blocks=True,
|
||||
)
|
||||
j2 = env.get_template(os.path.basename(template_path))
|
||||
result = j2.render(**variables)
|
||||
|
||||
if dry_run:
|
||||
console.print(f"[green]--- Template: {name} (dry-run) ---")
|
||||
console.print(result)
|
||||
console.print(f"[green]--- End of {name} ---")
|
||||
return
|
||||
|
||||
os.makedirs(os.path.dirname(output_path), exist_ok=True)
|
||||
with open(output_path, "w") as f:
|
||||
f.write(result)
|
||||
|
||||
console.print(f"[cyan]✔ Rendered template '{name}' → {output_path}")
|
||||
8
src/hdlbuild/models/dependency.py
Normal file
8
src/hdlbuild/models/dependency.py
Normal file
@@ -0,0 +1,8 @@
|
||||
# models/dependency.py
|
||||
|
||||
from pydantic import BaseModel
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
|
||||
class ResolvedDependency(BaseModel):
|
||||
project: ProjectConfig
|
||||
local_path: str
|
||||
@@ -1,6 +1,8 @@
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import List, Optional
|
||||
|
||||
from hdlbuild.models.templates import ProjectTemplates
|
||||
|
||||
class SourceFile(BaseModel):
|
||||
path: str
|
||||
library: str = "work" # Default auf 'work'
|
||||
@@ -14,9 +16,10 @@ class ToolOptions(BaseModel):
|
||||
bitgen: List[str] = Field(default_factory=list)
|
||||
trace: List[str] = Field(default_factory=list)
|
||||
fuse: List[str] = Field(default_factory=list)
|
||||
isim: List[str] = Field(default_factory=list)
|
||||
|
||||
class Dependency(BaseModel):
|
||||
name: str
|
||||
name: Optional[str] = None # Name ist jetzt optional
|
||||
git: str
|
||||
rev: str
|
||||
library: str = "work" # Default auf 'work'
|
||||
@@ -42,6 +45,7 @@ class ProjectConfig(BaseModel):
|
||||
sources: Sources
|
||||
testbenches: Optional[Testbenches] = None
|
||||
constraints: Optional[str] = None
|
||||
templates: Optional[ProjectTemplates] = None
|
||||
build: Optional[BuildOptions] = None
|
||||
dependencies: Optional[List[Dependency]] = Field(default_factory=list)
|
||||
tool_options: Optional[ToolOptions] = ToolOptions()
|
||||
14
src/hdlbuild/models/templates.py
Normal file
14
src/hdlbuild/models/templates.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from pydantic import BaseModel, Field, RootModel
|
||||
from typing import Dict, Any
|
||||
|
||||
class TemplateInstance(BaseModel):
|
||||
template: str # Pfad zur Jinja2-Vorlage
|
||||
output: str # Zielpfad
|
||||
variables: Dict[str, Any] = Field(default_factory=dict) # Variablen für Rendering
|
||||
|
||||
class ProjectTemplates(RootModel):
|
||||
"""
|
||||
Pydantic-RootModel, das die Mapping-Struktur *name → TemplateInstance*
|
||||
kapselt. In Pydantic v2 ersetzt `RootModel` die frühere `__root__`-Syntax.
|
||||
"""
|
||||
root: Dict[str, TemplateInstance] # key = Name wie „alu“, „control_unit“
|
||||
6
src/hdlbuild/templates/gitignore.template
Normal file
6
src/hdlbuild/templates/gitignore.template
Normal file
@@ -0,0 +1,6 @@
|
||||
.hdlbuild_deps/
|
||||
.working/
|
||||
reports/
|
||||
output/
|
||||
.locale/
|
||||
vhdl_ls.toml
|
||||
268
src/hdlbuild/templates/project.yml.template
Normal file
268
src/hdlbuild/templates/project.yml.template
Normal file
@@ -0,0 +1,268 @@
|
||||
name:
|
||||
topmodule:
|
||||
target_device: xc3s1200e-4-fg320
|
||||
xilinx_path: /opt/Xilinx/14.7/ISE_DS/ISE
|
||||
|
||||
constraints:
|
||||
|
||||
sources:
|
||||
vhdl:
|
||||
- path: src/*.vhd
|
||||
library: work
|
||||
|
||||
testbenches:
|
||||
vhdl:
|
||||
- path: tests/*.vhd
|
||||
library: work
|
||||
|
||||
dependencies:
|
||||
# - git: "https://git.0xmax42.io/maxp/Asynchronous-FIFO-AXI-Handshake.git"
|
||||
# rev: "hdlbuild"
|
||||
|
||||
build:
|
||||
build_dir: working
|
||||
report_dir: reports
|
||||
copy_target_dir: output
|
||||
|
||||
# Tool Optionen
|
||||
tool_options:
|
||||
common:
|
||||
- "-intstyle"
|
||||
- "xflow"
|
||||
|
||||
ngdbuild: []
|
||||
|
||||
map:
|
||||
- "-detail"
|
||||
- "-timing"
|
||||
- "-ol"
|
||||
- "high"
|
||||
|
||||
par: []
|
||||
|
||||
bitgen:
|
||||
- "-g"
|
||||
- "StartupClk:JtagClk"
|
||||
|
||||
trace:
|
||||
- "-v"
|
||||
- "3"
|
||||
- "-n"
|
||||
- "3"
|
||||
|
||||
fuse:
|
||||
- "-incremental"
|
||||
|
||||
isim:
|
||||
- "-gui"
|
||||
|
||||
xst:
|
||||
# Optimization goal: prioritize speed or area.
|
||||
# Values: Speed | Area
|
||||
- "-opt_mode Speed"
|
||||
|
||||
# Optimization level: more aggressive optimizations at level 2.
|
||||
# Values: 1 | 2
|
||||
- "-opt_level 2"
|
||||
|
||||
# Use the new XST parser (recommended for modern designs).
|
||||
# Values: yes | no
|
||||
- "-use_new_parser yes"
|
||||
|
||||
# Preserve design hierarchy or allow flattening for optimization.
|
||||
# Values: Yes | No | Soft
|
||||
- "-keep_hierarchy No"
|
||||
|
||||
# Determines how hierarchy is preserved in the netlist.
|
||||
# Values: As_Optimized | Rebuilt
|
||||
- "-netlist_hierarchy As_Optimized"
|
||||
|
||||
# Global optimization strategy for nets.
|
||||
# Values: AllClockNets | Offset_In_Before | Offset_Out_After | Inpad_To_Outpad | Max_Delay
|
||||
- "-glob_opt AllClockNets"
|
||||
|
||||
## Misc ##
|
||||
|
||||
# Enable reading of IP cores.
|
||||
# Values: YES | NO
|
||||
- "-read_cores YES"
|
||||
|
||||
# Do not write timing constraints into synthesis report.
|
||||
# Values: YES | NO
|
||||
- "-write_timing_constraints NO"
|
||||
|
||||
# Analyze paths across different clock domains.
|
||||
# Values: YES | NO
|
||||
- "-cross_clock_analysis NO"
|
||||
|
||||
# Character used to separate hierarchy levels in instance names.
|
||||
# Default: /
|
||||
- "-hierarchy_separator /"
|
||||
|
||||
# Delimiters used for bus signals.
|
||||
# Values: <> | [] | () | {}
|
||||
- "-bus_delimiter <>"
|
||||
|
||||
# Maintain original case of identifiers.
|
||||
# Values: Maintain | Upper | Lower
|
||||
- "-case Maintain"
|
||||
|
||||
# Target maximum utilization ratio for slices.
|
||||
# Values: 1–100
|
||||
- "-slice_utilization_ratio 100"
|
||||
|
||||
# Target maximum utilization ratio for BRAMs.
|
||||
# Values: 1–100
|
||||
- "-bram_utilization_ratio 100"
|
||||
|
||||
# Use Verilog 2001 syntax features.
|
||||
# Values: YES | NO
|
||||
- "-verilog2001 YES"
|
||||
|
||||
#### HDL Options ####
|
||||
|
||||
## FSM ##
|
||||
|
||||
# Extract FSMs (Finite State Machines) from HDL code.
|
||||
# Values: YES | NO
|
||||
- "-fsm_extract YES"
|
||||
|
||||
# Encoding strategy for FSMs.
|
||||
# Values: Auto | Gray | One-Hot | Johnson | Compact | Sequential | Speed1 | User
|
||||
- "-fsm_encoding Auto"
|
||||
|
||||
# Add safe logic for undefined FSM states.
|
||||
# Values: Yes | No
|
||||
- "-safe_implementation No"
|
||||
|
||||
# Structure used to implement FSMs.
|
||||
# Values: LUT | BRAM
|
||||
- "-fsm_style LUT"
|
||||
|
||||
## RAM/ROM ##
|
||||
|
||||
# Extract RAM inference from HDL.
|
||||
# Values: Yes | No
|
||||
- "-ram_extract Yes"
|
||||
|
||||
# Style used to implement RAM.
|
||||
# Values: Auto | Block | Distributed
|
||||
- "-ram_style Auto"
|
||||
|
||||
# Extract ROM inference from HDL.
|
||||
# Values: Yes | No
|
||||
- "-rom_extract Yes"
|
||||
|
||||
# Style used for implementing ROM.
|
||||
# Values: Auto | Distributed | Block
|
||||
- "-rom_style Auto"
|
||||
|
||||
# Enable or disable automatic BRAM packing.
|
||||
# Values: YES | NO
|
||||
- "-auto_bram_packing NO"
|
||||
|
||||
## MUX/Decoder/Shift Register ##
|
||||
|
||||
# Extract multiplexers where possible.
|
||||
# Values: Yes | No | Force
|
||||
- "-mux_extract Yes"
|
||||
|
||||
# Style used for implementing MUX logic.
|
||||
# Values: Auto | MUXCY | MUXF
|
||||
- "-mux_style Auto"
|
||||
|
||||
# Extract decoder logic from behavioral code.
|
||||
# Values: YES | NO
|
||||
- "-decoder_extract YES"
|
||||
|
||||
# Extract and optimize priority encoder structures.
|
||||
# Values: Yes | No | Force
|
||||
- "-priority_extract Yes"
|
||||
|
||||
# Extract shift register logic.
|
||||
# Values: YES | NO
|
||||
- "-shreg_extract YES"
|
||||
|
||||
# Extract simple shift operations into dedicated hardware.
|
||||
# Values: YES | NO
|
||||
- "-shift_extract YES"
|
||||
|
||||
## Multiplier ##
|
||||
|
||||
# Style for implementing multipliers.
|
||||
# Values: Auto | LUT | Pipe_LUT | Pipe_Block | Block
|
||||
- "-mult_style Auto"
|
||||
|
||||
## Misc ##
|
||||
|
||||
# Collapse XOR trees where beneficial.
|
||||
# Values: YES | NO
|
||||
- "-xor_collapse YES"
|
||||
|
||||
# Share resources like adders or multipliers between logic blocks.
|
||||
# Values: YES | NO | Force
|
||||
- "-resource_sharing YES"
|
||||
|
||||
# Convert asynchronous resets to synchronous where possible.
|
||||
# Values: YES | NO
|
||||
- "-async_to_sync NO"
|
||||
|
||||
#### Xilinx Specific Options ####
|
||||
|
||||
## Optimization ##
|
||||
|
||||
# Enable removal of logically equivalent registers.
|
||||
# Values: YES | NO
|
||||
- "-equivalent_register_removal YES"
|
||||
|
||||
# Duplicate registers to reduce fanout or improve timing.
|
||||
# Values: YES | NO
|
||||
- "-register_duplication YES"
|
||||
|
||||
# Move registers across logic to balance timing.
|
||||
# Values: Yes | No | Forward | Backward
|
||||
- "-register_balancing No"
|
||||
|
||||
# Use clock enable signals where possible.
|
||||
# Values: Auto | Yes | No
|
||||
- "-use_clock_enable Yes"
|
||||
|
||||
# Use synchronous set (preset) signals when available.
|
||||
# Values: Auto | Yes | No
|
||||
- "-use_sync_set Yes"
|
||||
|
||||
# Use synchronous reset signals where possible.
|
||||
# Values: Auto | Yes | No
|
||||
- "-use_sync_reset Yes"
|
||||
|
||||
## I/O ##
|
||||
|
||||
# Insert IO buffers for top-level ports.
|
||||
# Values: YES | NO
|
||||
- "-iobuf YES"
|
||||
|
||||
# Placement strategy for IOB registers (Auto = let tools decide).
|
||||
# Values: Auto | YES | NO
|
||||
- "-iob Auto"
|
||||
|
||||
## Misc ##
|
||||
|
||||
# Maximum allowed fanout for a net.
|
||||
# Values: integer (e.g., 500)
|
||||
- "-max_fanout 500"
|
||||
|
||||
# Maximum number of BUFGs (global buffers) to use.
|
||||
# Values: 0–32 (device-dependent)
|
||||
- "-bufg 24"
|
||||
|
||||
# Enable logic packing into slices.
|
||||
# Values: YES | NO
|
||||
- "-slice_packing YES"
|
||||
|
||||
# Try to reduce the number of primitive instances used.
|
||||
# Values: YES | NO
|
||||
- "-optimize_primitives NO"
|
||||
|
||||
# Margin in percent beyond the target slice utilization.
|
||||
# Values: 0–100
|
||||
- "-slice_utilization_ratio_maxmargin 5"
|
||||
@@ -2,9 +2,9 @@ import subprocess
|
||||
import os
|
||||
import shutil
|
||||
from typing import Optional
|
||||
from models.project import ProjectConfig
|
||||
from models.config import DIRECTORIES
|
||||
from tools.xilinx_ise.common import copy_file, run_tool
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||
|
||||
def run_bitgen(project: ProjectConfig):
|
||||
run_tool(
|
||||
@@ -23,7 +23,8 @@ def copy_bitstream_file(project: ProjectConfig):
|
||||
copy_file(
|
||||
project=project,
|
||||
source_filename=f"{project.name}.bit",
|
||||
destination_filename=f"{project.name}.Bitstream",
|
||||
destination_filename=f"{project.name}.bit",
|
||||
destination_dir=DIRECTORIES.copy_target,
|
||||
description="Bitstream File",
|
||||
step_number=10, total_steps=12
|
||||
)
|
||||
@@ -1,9 +1,9 @@
|
||||
import shutil
|
||||
import os
|
||||
from typing import Optional, List
|
||||
from models.project import ProjectConfig
|
||||
from models.config import DIRECTORIES
|
||||
from utils.console_utils import ConsoleTask, ConsoleUtils
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.utils.console_utils import ConsoleTask, ConsoleUtils
|
||||
from rich.console import Console
|
||||
|
||||
def run_tool(
|
||||
@@ -46,6 +46,7 @@ def copy_file(
|
||||
source_filename: str,
|
||||
destination_filename: str,
|
||||
description: str = "Report",
|
||||
destination_dir: str = DIRECTORIES.report,
|
||||
step_number: Optional[int] = None,
|
||||
total_steps: Optional[int] = None
|
||||
):
|
||||
@@ -59,12 +60,12 @@ def copy_file(
|
||||
description (str): Optionale Beschreibung für die Ausgabe (z.B. "Synthesis Report")
|
||||
"""
|
||||
src_path = os.path.join(DIRECTORIES.build, source_filename)
|
||||
dst_path = os.path.join(DIRECTORIES.report, destination_filename)
|
||||
dst_path = os.path.join(destination_dir, destination_filename)
|
||||
|
||||
if not os.path.exists(src_path):
|
||||
raise FileNotFoundError(f"{description} nicht gefunden: {src_path}")
|
||||
|
||||
os.makedirs(DIRECTORIES.report, exist_ok=True)
|
||||
os.makedirs(destination_dir, exist_ok=True)
|
||||
|
||||
shutil.copyfile(src_path, dst_path)
|
||||
|
||||
140
src/hdlbuild/tools/xilinx_ise/isim.py
Normal file
140
src/hdlbuild/tools/xilinx_ise/isim.py
Normal file
@@ -0,0 +1,140 @@
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.dependencies.resolver import DependencyResolver
|
||||
from hdlbuild.tools.xilinx_ise.common import run_tool
|
||||
from hdlbuild.utils.console_utils import ConsoleTask
|
||||
from hdlbuild.utils.source_resolver import expand_all_sources, expand_testbenches
|
||||
|
||||
def generate_simulation_project_file(project: ProjectConfig, output_path: str, testbench_name: str):
    """Generate the ISim simulation project file (.prj).

    Args:
        project (ProjectConfig): The main project.
        output_path (str): Target path for the .prj file.
        testbench_name (str): Name of the testbench file (e.g. "VGATimingGenerator_test_tb").
    """
    resolver = DependencyResolver(project, offline_mode=True)
    resolver.resolve_all()

    vhdl_sources, verilog_sources = expand_all_sources(project, resolver.resolved)

    with open(output_path, "w") as prj:
        # Regular VHDL sources
        for library, path in vhdl_sources:
            prj.write(f"vhdl {library} \"{DIRECTORIES.get_relative_prefix()}{path}\"\n")

        # Regular Verilog sources
        for library, path in verilog_sources:
            prj.write(f"verilog {library} \"{DIRECTORIES.get_relative_prefix()}{path}\"\n")

        # Locate the requested testbench and append it to the project file
        tb_path = os.path.normpath(find_testbench_file(project, testbench_name))
        prj.write(f"vhdl work \"{DIRECTORIES.get_relative_prefix()}{tb_path}\"\n")

        # glbl.v always comes last
        prj.write("verilog work /opt/Xilinx/14.7/ISE_DS/ISE/verilog/src/glbl.v\n")
|
||||
|
||||
|
||||
def find_testbench_file(project: ProjectConfig, testbench_name: str) -> str:
    """Locate a testbench file in the project by name (extension-less, case-insensitive).

    Args:
        project (ProjectConfig): Project definition.
        testbench_name (str): File name to search for (e.g. "VGATimingGenerator_test_tb").

    Returns:
        str: Full path to the testbench file.

    Raises:
        FileNotFoundError: If no matching file exists.
    """
    # Compare by stem only, case-insensitively, so callers may pass the
    # name with or without an HDL extension.
    wanted = os.path.splitext(testbench_name)[0].lower()

    for _, candidate in expand_testbenches(project):
        stem = os.path.splitext(os.path.basename(candidate))[0].lower()
        if stem == wanted:
            return candidate

    raise FileNotFoundError(f"Testbench '{testbench_name}' wurde nicht gefunden.")
|
||||
|
||||
def build_testbench(project: ProjectConfig, testbench_name: str):
    """Build a single testbench executable with FUSE.

    Args:
        project (ProjectConfig): Main project configuration.
        testbench_name (str): Name of the testbench file, e.g. "VGATimingGenerator_test_tb.vhd"
    """
    # Top-level simulation unit: the testbench file name without its HDL
    # extension. Computed once (the original recomputed it and also kept
    # an unused `isim_exe_path` local, both removed here).
    unit_name = testbench_name.replace('.vhd', '').replace('.v', '')
    isim_exe_name = f"isim_{unit_name}"

    # 1. Generate the simulation project file (.prj)
    generate_simulation_project_file(
        project=project,
        output_path=os.path.join(DIRECTORIES.build, f"{project.name}_sim.prj"),
        testbench_name=testbench_name
    )

    # 2. Run the FUSE command via `run_tool`
    mandatory_arguments = [
        "-prj", f"{project.name}_sim.prj",
        "-o", isim_exe_name,
        f"work.{unit_name}",
        "work.glbl"
    ]

    run_tool(
        project=project,
        tool_executable_name="fuse",
        tool_option_attr="fuse",
        mandatory_arguments=mandatory_arguments,
        working_dir=DIRECTORIES.build,
        silent=False
    )
|
||||
|
||||
def run_testbench(project: ProjectConfig, testbench_name: str):
    """Run a previously built testbench executable (ISim simulation).

    Args:
        project (ProjectConfig): Main project configuration (supplies optional isim tool options).
        testbench_name (str): Name of the testbench file (e.g. "VGATimingGenerator_test_tb.vhd")
    """
    # Executable name mirrors the one produced by the build step.
    exe_name = f"isim_{testbench_name.replace('.vhd', '').replace('.v', '')}"
    cmd_file = os.path.join(DIRECTORIES.build, f"{exe_name}.cmd")

    # 1. Create an (empty) TCL batch script for ISim — a plain run, no extra commands.
    with open(cmd_file, "w") as script:
        script.write("")

    # 2. Assemble the command: executable, optional user options, then the batch file.
    cmd = [f"./{exe_name}"]
    extra_opts = getattr(project.tool_options, "isim", [])
    if extra_opts:
        cmd.extend(extra_opts)
    cmd.extend(["-tclbatch", f"{exe_name}.cmd"])

    # 3. Execute inside the build directory.
    task = ConsoleTask(prefix="hdlbuild", title=f"RUN {testbench_name}")
    if task.run_command(cmd, cwd=DIRECTORIES.build) != 0:
        raise RuntimeError(f"Testbench {testbench_name} ist während der Simulation fehlgeschlagen!")
|
||||
@@ -1,11 +1,11 @@
|
||||
from models.config import DIRECTORIES
|
||||
from models.project import ProjectConfig
|
||||
from tools.xilinx_ise.bitgen import copy_bitstream_file, run_bitgen
|
||||
from tools.xilinx_ise.map import copy_map_report, run_map
|
||||
from tools.xilinx_ise.ngdbuild import run_ngdbuild
|
||||
from tools.xilinx_ise.par import copy_par_report, copy_pinout_report, run_par
|
||||
from tools.xilinx_ise.trace import copy_trace_report, run_trace
|
||||
from tools.xilinx_ise.xst import copy_synthesis_report, generate_xst_project_file, generate_xst_script_file, run_xst
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.tools.xilinx_ise.bitgen import copy_bitstream_file, run_bitgen
|
||||
from hdlbuild.tools.xilinx_ise.map import copy_map_report, run_map
|
||||
from hdlbuild.tools.xilinx_ise.ngdbuild import run_ngdbuild
|
||||
from hdlbuild.tools.xilinx_ise.par import copy_par_report, copy_pinout_report, run_par
|
||||
from hdlbuild.tools.xilinx_ise.trace import copy_trace_report, run_trace
|
||||
from hdlbuild.tools.xilinx_ise.xst import copy_synthesis_report, generate_xst_project_file, generate_xst_script_file, run_xst
|
||||
|
||||
|
||||
def xilinx_ise_synth(project: ProjectConfig):
|
||||
@@ -2,9 +2,9 @@ import subprocess
|
||||
import os
|
||||
import shutil
|
||||
from typing import Optional
|
||||
from models.project import ProjectConfig
|
||||
from models.config import DIRECTORIES
|
||||
from tools.xilinx_ise.common import copy_file, run_tool
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||
|
||||
def run_map(project: ProjectConfig):
|
||||
run_tool(
|
||||
@@ -1,9 +1,9 @@
|
||||
import subprocess
|
||||
import os
|
||||
from typing import Optional
|
||||
from models.project import ProjectConfig
|
||||
from models.config import DIRECTORIES
|
||||
from tools.xilinx_ise.common import run_tool
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.tools.xilinx_ise.common import run_tool
|
||||
|
||||
def run_ngdbuild(project: ProjectConfig):
|
||||
run_tool(
|
||||
@@ -2,9 +2,9 @@ import subprocess
|
||||
import shutil
|
||||
import os
|
||||
from typing import Optional
|
||||
from models.project import ProjectConfig
|
||||
from models.config import DIRECTORIES
|
||||
from tools.xilinx_ise.common import copy_file, run_tool
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||
|
||||
def run_par(project: ProjectConfig):
|
||||
run_tool(
|
||||
@@ -2,9 +2,9 @@ import subprocess
|
||||
import os
|
||||
import shutil
|
||||
from typing import Optional
|
||||
from models.project import ProjectConfig
|
||||
from models.config import DIRECTORIES
|
||||
from tools.xilinx_ise.common import copy_file, run_tool
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||
|
||||
def run_trace(project: ProjectConfig):
|
||||
run_tool(
|
||||
@@ -1,8 +1,9 @@
|
||||
from typing import Optional
|
||||
from models.config import DIRECTORIES
|
||||
from tools.xilinx_ise.common import copy_file, run_tool
|
||||
from utils.source_resolver import expand_sources
|
||||
from models.project import ProjectConfig
|
||||
from hdlbuild.dependencies.resolver import DependencyResolver
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||
from hdlbuild.utils.source_resolver import expand_all_sources
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
import subprocess
|
||||
import os
|
||||
import shutil
|
||||
@@ -12,12 +13,15 @@ def generate_xst_project_file(project: ProjectConfig, output_path: str):
|
||||
Generiert die XST .prj-Datei mit allen Quellcodes.
|
||||
"""
|
||||
with open(output_path, "w") as f:
|
||||
# VHDL-Sources
|
||||
for lib, src in expand_sources(project.sources.vhdl):
|
||||
f.write(f"vhdl {lib} \"{DIRECTORIES.get_relative_prefix()}/{src}\"\n")
|
||||
# Verilog-Sources
|
||||
for lib, src in expand_sources(project.sources.verilog):
|
||||
f.write(f"verilog {lib} \"{DIRECTORIES.get_relative_prefix()}/{src}\"\n")
|
||||
resolver = DependencyResolver(project, offline_mode=True)
|
||||
resolver.resolve_all()
|
||||
vhdl_sources, verilog_sources = expand_all_sources(project, resolver.resolved)
|
||||
|
||||
for lib, file in vhdl_sources:
|
||||
f.write(f"vhdl {lib} \"{DIRECTORIES.get_relative_prefix()}{file}\"\n")
|
||||
|
||||
for lib, file in verilog_sources:
|
||||
f.write(f"verilog {lib} \"{DIRECTORIES.get_relative_prefix()}{file}\"\n")
|
||||
|
||||
# Optionale Dependencies
|
||||
if project.dependencies:
|
||||
@@ -1,3 +1,4 @@
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import subprocess
|
||||
@@ -6,6 +7,7 @@ from typing import List, Optional
|
||||
from rich.console import Console
|
||||
from rich.live import Live
|
||||
from rich.text import Text
|
||||
from rich.markup import render
|
||||
|
||||
class ConsoleTask:
|
||||
def __init__(self, prefix:str, title: str, step_number: Optional[int] = None, total_steps: Optional[int] = None, max_log_lines: int = 10):
|
||||
@@ -18,6 +20,7 @@ class ConsoleTask:
|
||||
self.stop_event = threading.Event()
|
||||
self.spinner_thread: Optional[threading.Thread] = None
|
||||
self.output_lines: List[str] = []
|
||||
self.all_lines: List[str] = []
|
||||
self._stdout_lock = threading.Lock()
|
||||
self.console = Console()
|
||||
self.live: Optional[Live] = None
|
||||
@@ -58,6 +61,7 @@ class ConsoleTask:
|
||||
|
||||
def log(self, message: str):
|
||||
with self._stdout_lock:
|
||||
self.all_lines.append(message)
|
||||
self.output_lines.append(message)
|
||||
if len(self.output_lines) > self.max_log_lines:
|
||||
self.output_lines = self.output_lines[-self.max_log_lines:]
|
||||
@@ -89,12 +93,7 @@ class ConsoleTask:
|
||||
self.log(line.rstrip())
|
||||
|
||||
success = (process.returncode == 0)
|
||||
if not success:
|
||||
raise subprocess.CalledProcessError(process.returncode, cmd)
|
||||
|
||||
except subprocess.CalledProcessError:
|
||||
success = False
|
||||
raise
|
||||
finally:
|
||||
self.stop_event.set()
|
||||
if self.spinner_thread:
|
||||
@@ -102,11 +101,18 @@ class ConsoleTask:
|
||||
|
||||
duration = time.time() - start_time
|
||||
|
||||
# Finalize output
|
||||
with self._stdout_lock:
|
||||
self._finalize_output(success, duration)
|
||||
|
||||
return 0 if success else 1
|
||||
if not success:
|
||||
# Schöne Fehlerausgabe und kontrolliertes Beenden
|
||||
self.console.print("\n[bold red]❌ Fehler beim Ausführen des Kommandos:[/bold red]")
|
||||
for line in self.all_lines:
|
||||
self.console.print(f"[red]{line}[/red]")
|
||||
sys.exit(1) # ❗ Hier: hartes, aber sauberes Beenden des Programms
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def _finalize_output(self, success: bool, duration: float):
|
||||
if self.live:
|
||||
@@ -122,18 +128,49 @@ class ConsoleTask:
|
||||
self.console.print(final_line)
|
||||
|
||||
class ConsoleUtils:
|
||||
def __init__(self,
|
||||
prefix: str = "",
|
||||
def __init__(
|
||||
self,
|
||||
prefix: str = "hdlbuild",
|
||||
step_number: Optional[int] = None,
|
||||
total_steps: Optional[int] = None
|
||||
total_steps: Optional[int] = None,
|
||||
live: bool = False
|
||||
):
|
||||
self.prefix = prefix
|
||||
self.step_number = step_number
|
||||
self.total_steps = total_steps
|
||||
self.console = Console()
|
||||
self.live_mode = live
|
||||
self.live: Optional[Live] = None
|
||||
self.messages: List[str] = []
|
||||
|
||||
def start_live(self):
    """Start live mode (no-op unless live mode is enabled and not already running)."""
    if not self.live_mode or self.live is not None:
        return
    self.live = Live(console=self.console, refresh_per_second=10, transient=True)
    self.live.start()
|
||||
|
||||
def print(self, message: str):
|
||||
prefix = f"[grey50]\[{self.prefix}][/grey50]" if self.prefix else ""
|
||||
step_info = f"[bold blue]Step {self.step_number}/{self.total_steps}[/bold blue]" if self.step_number and self.total_steps else ""
|
||||
message_text = f"{prefix} {step_info} {message}"
|
||||
self.console.print(message_text)
|
||||
full_message = f"{prefix} {step_info} {message}"
|
||||
|
||||
if self.live_mode and self.live:
|
||||
self.messages.append(full_message)
|
||||
rendered_lines = [Text.from_markup(line) for line in self.messages]
|
||||
combined = Text()
|
||||
for line in rendered_lines:
|
||||
combined.append(line)
|
||||
combined.append("\n")
|
||||
self.live.update(combined)
|
||||
else:
|
||||
self.console.print(full_message)
|
||||
|
||||
def stop_live(self, final_message: Optional[str] = None):
    """Stop live mode, discard buffered output, and optionally print a closing message."""
    if self.live_mode and self.live:
        self.live.stop()
        self.live = None
        # Buffered messages were transient live output only — drop them.
        self.messages.clear()

    if final_message:
        self.console.print(final_message)
|
||||
@@ -1,7 +1,7 @@
|
||||
import os
|
||||
import shutil
|
||||
from models.config import DIRECTORIES
|
||||
from utils.console_utils import ConsoleUtils
|
||||
from hdlbuild.models.config import DIRECTORIES
|
||||
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||
|
||||
def ensure_directories_exist(silent: bool = False):
|
||||
"""
|
||||
@@ -1,5 +1,5 @@
|
||||
import yaml
|
||||
from models.project import ProjectConfig
|
||||
from hdlbuild.models.project import ProjectConfig
|
||||
|
||||
def load_project_config(path: str = "project.yml") -> ProjectConfig:
|
||||
"""
|
||||
100
src/hdlbuild/utils/source_resolver.py
Normal file
100
src/hdlbuild/utils/source_resolver.py
Normal file
@@ -0,0 +1,100 @@
|
||||
# src/hdlbuild/utils/source_resolver.py
|
||||
|
||||
import glob
|
||||
import os
|
||||
from typing import List, Tuple
|
||||
from hdlbuild.models.project import SourceFile, ProjectConfig
|
||||
from hdlbuild.models.dependency import ResolvedDependency
|
||||
|
||||
def _expand_project_sources(project: ProjectConfig, project_root: str) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
    """Expand the source globs of a single project, split into VHDL and Verilog.

    Args:
        project (ProjectConfig): Project whose sources are expanded.
        project_root (str): Base directory against which the paths are resolved.

    Returns:
        Tuple: ((library, filepath) list for VHDL, (library, filepath) list for Verilog)
    """
    def _expand(entries):
        # One (library, normalized path) pair per file matched by each glob pattern.
        hits = []
        for entry in entries:
            pattern = os.path.join(project_root, entry.path)
            for match in glob.glob(pattern, recursive=True):
                hits.append((entry.library, os.path.normpath(match)))
        return hits

    return _expand(project.sources.vhdl), _expand(project.sources.verilog)


def expand_all_sources(root_project: ProjectConfig, resolved_dependencies: List[ResolvedDependency]) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
    """Expand all sources of the root project and every dependency, split by language.

    Args:
        root_project (ProjectConfig): The main project
        resolved_dependencies (List[ResolvedDependency]): All recursively resolved dependencies

    Returns:
        Tuple:
            - List of (library, filepath) for VHDL
            - List of (library, filepath) for Verilog
    """
    vhdl_all: List[Tuple[str, str]] = []
    verilog_all: List[Tuple[str, str]] = []

    # The root project resolves relative to the current working directory;
    # each dependency resolves relative to its checked-out local path.
    projects = [(root_project, ".")]
    projects.extend((dep.project, dep.local_path) for dep in resolved_dependencies)

    for proj, root in projects:
        vhdl_part, verilog_part = _expand_project_sources(proj, root)
        vhdl_all.extend(vhdl_part)
        verilog_all.extend(verilog_part)

    return vhdl_all, verilog_all
|
||||
|
||||
|
||||
def expand_testbenches(project: ProjectConfig) -> List[Tuple[str, str]]:
    """Expand only the testbench globs (VHDL and Verilog) of the main project.

    Args:
        project (ProjectConfig): The main project.

    Returns:
        List of (library, filepath) tuples.
    """
    matches: List[Tuple[str, str]] = []
    if not project.testbenches:
        return matches

    # VHDL testbenches first, then the (optional) Verilog ones.
    for entry in [*project.testbenches.vhdl, *project.testbenches.verilog]:
        pattern = os.path.join(".", entry.path)
        for hit in glob.glob(pattern, recursive=True):
            matches.append((entry.library, os.path.normpath(hit)))

    return matches
|
||||
@@ -1,19 +0,0 @@
|
||||
import glob
|
||||
import os
|
||||
from typing import List, Tuple
|
||||
from models.project import SourceFile
|
||||
|
||||
def expand_sources(sources: List[SourceFile]) -> List[Tuple[str, str]]:
    """Expand SourceFile entries whose paths may contain wildcards into real paths.

    Returns:
        List of (library, filepath)
    """
    # Each glob match yields one (library, normalized path) pair,
    # preserving the order of the input entries.
    return [
        (entry.library, os.path.normpath(match))
        for entry in sources
        for match in glob.glob(entry.path, recursive=True)
    ]
|
||||
Reference in New Issue
Block a user