Compare commits
78 Commits
5c4cab2bb1
...
v0.6.1
| Author | SHA1 | Date | |
|---|---|---|---|
| 5d8d995a04 | |||
|
36f6e7314a
|
|||
| bc8ae46b4a | |||
|
dde2363ad7
|
|||
|
32e3b51a77
|
|||
|
da387f2ee6
|
|||
|
08679c2680
|
|||
|
7b6f9ef224
|
|||
|
6ca389d5cb
|
|||
|
1d7bc19965
|
|||
|
175bf4882a
|
|||
|
0d26c42f8a
|
|||
|
fa7e738b7e
|
|||
|
60c4ca19b1
|
|||
|
38d4c7d5f3
|
|||
|
9d33b6e8be
|
|||
|
dde3b5dc5d
|
|||
|
6d6b731033
|
|||
|
8196eb3929
|
|||
|
5083852c29
|
|||
|
215e4aa545
|
|||
|
d78bfcc408
|
|||
|
8ed550f451
|
|||
|
37b684fd2d
|
|||
|
7cf69aa16f
|
|||
|
9274461d7a
|
|||
|
b79b7559f2
|
|||
| 22266f2947 | |||
| c7b2970949 | |||
| 6271eb0998 | |||
| 308de700a2 | |||
| f1315dc458 | |||
| ba8cc2ef94 | |||
| f87b08441f | |||
| 52f5a8fe24 | |||
| 836a5bf643 | |||
| ffd8d6eac9 | |||
| 44f2f15dc5 | |||
| d6d792c9e6 | |||
| 07922f8a54 | |||
| 29d04a25f0 | |||
| bbe5d7f2b1 | |||
| 84800c25a5 | |||
| e58c0897ce | |||
| 277a16e126 | |||
| ad4ba6e412 | |||
| 2f3e0d375f | |||
| 528907fabe | |||
| fae19340cc | |||
| a57062a3f8 | |||
| cef035329a | |||
| 93e67c0380 | |||
| 8caafe862e | |||
| cfa62a5624 | |||
| 263c3fac5e | |||
| 961d182bf7 | |||
| 5263a31c07 | |||
| fb5c3127b5 | |||
| 5061ee218f | |||
| dc0d8242a8 | |||
| 28406028c6 | |||
| f9d5e3c535 | |||
| 4f1f2e7d51 | |||
| 3cf3fc1437 | |||
| 12d9f4b6c9 | |||
| 90859715c4 | |||
| cc82d883c0 | |||
| 72699ed32f | |||
| 690decb33b | |||
| 2e2d86cfc2 | |||
| decc18ac83 | |||
| 5c52c0cf59 | |||
| 91f4f03d97 | |||
| 7a03040926 | |||
| f0100234c9 | |||
| cfca986f72 | |||
| defd2345b1 | |||
| 8b735c0d97 |
5
.gitea/default_merge_message/MERGE_TEMPLATE.md
Normal file
5
.gitea/default_merge_message/MERGE_TEMPLATE.md
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
chore(pr): ${PullRequestTitle} ${PullRequestReference}
|
||||||
|
|
||||||
|
${PullRequestDescription}
|
||||||
|
|
||||||
|
Merged from ${HeadBranch} into ${BaseBranch}
|
||||||
45
.gitea/scripts/cleanup_versions.sh
Executable file
45
.gitea/scripts/cleanup_versions.sh
Executable file
@@ -0,0 +1,45 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# cleanup_dev_versions.sh - Delete old PyPI dev versions from Gitea package registry
|
||||||
|
|
||||||
|
# Required environment variables
|
||||||
|
USERNAME="${TWINE_USERNAME}"
|
||||||
|
TOKEN="${TWINE_PASSWORD}"
|
||||||
|
REPO="${GITHUB_REPOSITORY}" # e.g., maxp/repocat
|
||||||
|
API_BASE="${GITHUB_API_URL%/}" # Strip trailing slash if present
|
||||||
|
|
||||||
|
OWNER="${REPO%%/*}"
|
||||||
|
PACKAGE_NAME="${REPO##*/}"
|
||||||
|
API_URL="${API_BASE}/packages/${OWNER}/pypi/${PACKAGE_NAME}"
|
||||||
|
|
||||||
|
# Fetch the list of versions
|
||||||
|
response=$(curl -s -u "$USERNAME:$TOKEN" "$API_URL")
|
||||||
|
|
||||||
|
# Extract all .dev versions, sort by creation time
|
||||||
|
mapfile -t versions_to_delete < <(echo "$response" | jq -r '
|
||||||
|
map(select(.version | test("\\.dev"))) |
|
||||||
|
sort_by(.created_at) |
|
||||||
|
.[0:-1][] |
|
||||||
|
.version')
|
||||||
|
|
||||||
|
# Determine latest version to keep
|
||||||
|
latest_version=$(echo "$response" | jq -r '
|
||||||
|
map(select(.version | test("\\.dev"))) |
|
||||||
|
sort_by(.created_at) |
|
||||||
|
last.version')
|
||||||
|
|
||||||
|
if [[ -z "$latest_version" || ${#versions_to_delete[@]} -eq 0 ]]; then
|
||||||
|
echo "No old .dev versions to delete."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Keeping latest .dev version: $latest_version"
|
||||||
|
|
||||||
|
# Delete old .dev versions
|
||||||
|
for version in "${versions_to_delete[@]}"; do
|
||||||
|
echo "Deleting old .dev version: $version"
|
||||||
|
curl -s -X DELETE -u "$USERNAME:$TOKEN" "$API_URL/$version"
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Cleanup complete."
|
||||||
21
.gitea/scripts/get-release-id.sh
Executable file
21
.gitea/scripts/get-release-id.sh
Executable file
@@ -0,0 +1,21 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Eingaben
|
||||||
|
TAG="$1"
|
||||||
|
TOKEN="${ACTIONS_RUNTIME_TOKEN:-<fallback_token>}"
|
||||||
|
REPO="${GITHUB_REPOSITORY:-owner/example}"
|
||||||
|
API="${GITHUB_API_URL:-https://gitea.example.tld/api/v1}"
|
||||||
|
|
||||||
|
OWNER=$(echo "$REPO" | cut -d/ -f1)
|
||||||
|
NAME=$(echo "$REPO" | cut -d/ -f2)
|
||||||
|
|
||||||
|
RESPONSE=$(curl -sf \
|
||||||
|
-H "Authorization: token $TOKEN" \
|
||||||
|
"$API/repos/$OWNER/$NAME/releases/tags/$TAG")
|
||||||
|
|
||||||
|
RELEASE_ID=$(echo "$RESPONSE" | jq -r '.id')
|
||||||
|
echo "Release-ID für $TAG ist: $RELEASE_ID"
|
||||||
|
|
||||||
|
# Für GitHub Actions als Umgebungsvariable
|
||||||
|
echo "GT_RELEASE_ID=$RELEASE_ID" >> "$GITHUB_ENV"
|
||||||
14
.gitea/scripts/set_poetry_version.sh
Executable file
14
.gitea/scripts/set_poetry_version.sh
Executable file
@@ -0,0 +1,14 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
BASE_VERSION=$(cat VERSION)
|
||||||
|
NIGHTLY_SUFFIX=""
|
||||||
|
|
||||||
|
if [[ "$1" == "nightly" ]]; then
|
||||||
|
# Beispiel: 20240511.1358 → 11. Mai, 13:58 Uhr
|
||||||
|
NIGHTLY_SUFFIX=".dev$(date +%Y%m%d%H%M)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
FULL_VERSION="${BASE_VERSION}${NIGHTLY_SUFFIX}"
|
||||||
|
|
||||||
|
echo "Using version: $FULL_VERSION"
|
||||||
|
poetry version "$FULL_VERSION"
|
||||||
21
.gitea/scripts/sync_version_from_poetry.sh
Executable file
21
.gitea/scripts/sync_version_from_poetry.sh
Executable file
@@ -0,0 +1,21 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Stelle sicher, dass wir im Projektverzeichnis sind
|
||||||
|
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||||
|
cd "$ROOT_DIR"
|
||||||
|
|
||||||
|
PYPROJECT="pyproject.toml"
|
||||||
|
VERSION_FILE="VERSION"
|
||||||
|
|
||||||
|
# Extrahiere die Version mit grep + sed (keine externen Abhängigkeiten nötig)
|
||||||
|
VERSION=$(grep -E '^version\s*=' "$PYPROJECT" | head -n1 | sed -E 's/.*=\s*"([^"]+)".*/\1/')
|
||||||
|
|
||||||
|
if [[ -z "$VERSION" ]]; then
|
||||||
|
echo "❌ Version konnte nicht aus $PYPROJECT gelesen werden."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf "%s" "$VERSION" > "$VERSION_FILE"
|
||||||
|
echo "✅ Version synchronisiert: $VERSION → $VERSION_FILE"
|
||||||
40
.gitea/scripts/upload-asset.sh
Executable file
40
.gitea/scripts/upload-asset.sh
Executable file
@@ -0,0 +1,40 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Eingabeparameter
|
||||||
|
FILE_PATH="$1" # z. B. ./dist/build.zip
|
||||||
|
CUSTOM_NAME="${2:-}" # optional: anderer Name unter dem das Asset gespeichert werden soll
|
||||||
|
RELEASE_ID="${GT_RELEASE_ID:-}" # aus Umgebung
|
||||||
|
|
||||||
|
# Validierung
|
||||||
|
if [[ -z "$RELEASE_ID" ]]; then
|
||||||
|
echo "❌ RELEASE_ID ist nicht gesetzt. Abbruch."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "$FILE_PATH" ]]; then
|
||||||
|
echo "❌ Datei '$FILE_PATH' existiert nicht. Abbruch."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Default-Konfiguration
|
||||||
|
TOKEN="${ACTIONS_RUNTIME_TOKEN:-<fallback_token>}"
|
||||||
|
REPO="${GITHUB_REPOSITORY:-owner/example}"
|
||||||
|
API="${GITHUB_API_URL:-https://gitea.example.tld/api/v1}"
|
||||||
|
|
||||||
|
# Owner/Repo auflösen
|
||||||
|
OWNER=$(echo "$REPO" | cut -d/ -f1)
|
||||||
|
NAME=$(echo "$REPO" | cut -d/ -f2)
|
||||||
|
|
||||||
|
# Dateiname setzen
|
||||||
|
FILENAME="${CUSTOM_NAME:-$(basename "$FILE_PATH")}"
|
||||||
|
|
||||||
|
echo "🔼 Uploading '$FILE_PATH' as '$FILENAME' to release ID $RELEASE_ID"
|
||||||
|
|
||||||
|
# Upload
|
||||||
|
curl -sf -X POST \
|
||||||
|
-H "Authorization: token $TOKEN" \
|
||||||
|
-F "attachment=@$FILE_PATH" \
|
||||||
|
"$API/repos/$OWNER/$NAME/releases/$RELEASE_ID/assets?name=$FILENAME"
|
||||||
|
|
||||||
|
echo "✅ Upload abgeschlossen: $FILENAME"
|
||||||
62
.gitea/workflows/build-and-deploy-nightly.yml
Normal file
62
.gitea/workflows/build-and-deploy-nightly.yml
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
name: Build and Publish nightly package
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
paths-ignore:
|
||||||
|
- 'CHANGELOG.md'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-publish:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout Repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: 🐍 Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.12"
|
||||||
|
|
||||||
|
- name: 🔄 Restore cache
|
||||||
|
uses: https://git.0xmax42.io/actions/cache@v1
|
||||||
|
with:
|
||||||
|
key: poetry-v1-${{ runner.os }}-${{ hashFiles('poetry.lock') }}
|
||||||
|
paths: |
|
||||||
|
~/.cache/pypoetry
|
||||||
|
~/.cache/pip
|
||||||
|
|
||||||
|
- name: Install Poetry
|
||||||
|
run: |
|
||||||
|
pip install poetry
|
||||||
|
|
||||||
|
- name: Install Project Dependencies
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
poetry install
|
||||||
|
|
||||||
|
- name: Set version from VERSION file (with nightly suffix)
|
||||||
|
run: ./.gitea/scripts/set_poetry_version.sh nightly
|
||||||
|
|
||||||
|
- name: Build Package
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
poetry build
|
||||||
|
|
||||||
|
- name: Publish to Gitea Package Registry
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
||||||
|
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
||||||
|
run: |
|
||||||
|
poetry run twine upload --repository-url ${{ secrets.TWINE_URL }} dist/*
|
||||||
|
|
||||||
|
- name: Cleanup old dev versions
|
||||||
|
run: |
|
||||||
|
.gitea/scripts/cleanup_versions.sh '\.dev'
|
||||||
|
env:
|
||||||
|
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
||||||
|
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
||||||
64
.gitea/workflows/build-and-deploy-release.yml
Normal file
64
.gitea/workflows/build-and-deploy-release.yml
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
name: Build and Publish nightly package
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [published]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-publish:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout Repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ github.event.release.tag_name }}
|
||||||
|
|
||||||
|
- name: 🐍 Setup Python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.12"
|
||||||
|
|
||||||
|
- name: 🔄 Restore cache
|
||||||
|
uses: https://git.0xmax42.io/actions/cache@v1
|
||||||
|
with:
|
||||||
|
key: poetry-v1-${{ runner.os }}-${{ hashFiles('poetry.lock') }}
|
||||||
|
paths: |
|
||||||
|
~/.cache/pypoetry
|
||||||
|
~/.cache/pip
|
||||||
|
|
||||||
|
- name: Install Poetry
|
||||||
|
run: |
|
||||||
|
pip install poetry
|
||||||
|
|
||||||
|
- name: Install Project Dependencies
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
poetry install
|
||||||
|
|
||||||
|
- name: Build Package
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
poetry build
|
||||||
|
|
||||||
|
- name: Get built wheel filename
|
||||||
|
id: get_whl
|
||||||
|
run: |
|
||||||
|
echo "whl_file=$(basename dist/*.whl)" >> $GITHUB_OUTPUT
|
||||||
|
echo "sdist_file=$(basename dist/*.tar.gz)" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Publish to Gitea Package Registry
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
|
||||||
|
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
|
||||||
|
run: |
|
||||||
|
poetry run twine upload --repository-url ${{ secrets.TWINE_URL }} dist/*
|
||||||
|
|
||||||
|
- name: Get Release ID from tag
|
||||||
|
run: .gitea/scripts/get-release-id.sh "${{ github.event.release.tag_name }}"
|
||||||
|
|
||||||
|
- name: Upload assets
|
||||||
|
run: |
|
||||||
|
.gitea/scripts/upload-asset.sh ./dist/${{ steps.get_whl.outputs.whl_file }}
|
||||||
|
.gitea/scripts/upload-asset.sh ./dist/${{ steps.get_whl.outputs.sdist_file }}
|
||||||
18
.gitea/workflows/release.yml
Normal file
18
.gitea/workflows/release.yml
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
name: Auto Changelog & (Release)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Release
|
||||||
|
uses: https://git.0xmax42.io/actions/auto-changelog-release-action@v0
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.RELEASE_PUBLISH_TOKEN }}
|
||||||
12
.gitignore
vendored
12
.gitignore
vendored
@@ -174,3 +174,15 @@ cython_debug/
|
|||||||
# PyPI configuration file
|
# PyPI configuration file
|
||||||
.pypirc
|
.pypirc
|
||||||
|
|
||||||
|
# hdlbuild specific
|
||||||
|
|
||||||
|
.hdlbuild_deps/
|
||||||
|
.working/
|
||||||
|
reports/
|
||||||
|
output/
|
||||||
|
vhdl/
|
||||||
|
poetry.lock
|
||||||
|
project.yml
|
||||||
|
.project/
|
||||||
|
.devcontainer/
|
||||||
|
vhdltests/
|
||||||
15
.vscode/settings.json
vendored
Normal file
15
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"python.envFile": "${workspaceFolder}/.env",
|
||||||
|
"python.analysis.extraPaths": [
|
||||||
|
"src"
|
||||||
|
],
|
||||||
|
"workbench.colorCustomizations": {
|
||||||
|
"activityBar.activeBackground": "#8dc4ff",
|
||||||
|
"activityBar.background": "#8dc4ff",
|
||||||
|
"activityBar.foreground": "#15202b",
|
||||||
|
"activityBar.inactiveForeground": "#15202b99",
|
||||||
|
"activityBarBadge.background": "#ff007b",
|
||||||
|
"activityBarBadge.foreground": "#e7e7e7"
|
||||||
|
},
|
||||||
|
"peacock.color": "#5aaaff"
|
||||||
|
}
|
||||||
31
CHANGELOG.md
Normal file
31
CHANGELOG.md
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
## [0.6.1] - 2025-07-16
|
||||||
|
|
||||||
|
### 🚀 Features
|
||||||
|
|
||||||
|
- *(ci)* Add workflows for nightly builds and releases - ([da387f2](https://git.0xmax42.io/maxp/hdlbuild/commit/da387f2ee602390d616c79bf4057ccf941e21462))
|
||||||
|
|
||||||
|
### 🚜 Refactor
|
||||||
|
|
||||||
|
- Use typer for CLI argument parsing - ([6ca389d](https://git.0xmax42.io/maxp/hdlbuild/commit/6ca389d5cbbeff53faab9d61376a8c77ed097b6c))
|
||||||
|
- Improves project configuration - ([175bf48](https://git.0xmax42.io/maxp/hdlbuild/commit/175bf4882a8f172ee536d726b31136690572be36))
|
||||||
|
|
||||||
|
### 📚 Documentation
|
||||||
|
|
||||||
|
- *(readme)* Expand README with detailed usage and setup - ([fa7e738](https://git.0xmax42.io/maxp/hdlbuild/commit/fa7e738b7eade5a627218741a6fb4bd1617f7801))
|
||||||
|
|
||||||
|
### 🎨 Styling
|
||||||
|
|
||||||
|
- *(pyproject)* Simplify include array formatting - ([08679c2](https://git.0xmax42.io/maxp/hdlbuild/commit/08679c2680b49119e0414688a80e8dc2659236b4))
|
||||||
|
- Updates VS Code editor color scheme - ([1d7bc19](https://git.0xmax42.io/maxp/hdlbuild/commit/1d7bc1996522ab54970348b5118ad319849a6a1f))
|
||||||
|
|
||||||
|
### ⚙️ Miscellaneous Tasks
|
||||||
|
|
||||||
|
- *(config)* Add git-cliff configuration file - ([dde2363](https://git.0xmax42.io/maxp/hdlbuild/commit/dde2363ad7dd2fd2d48c6154e3b88c1c4a6867fd))
|
||||||
|
- Updates project version to 0.6.0 - ([7b6f9ef](https://git.0xmax42.io/maxp/hdlbuild/commit/7b6f9ef2240864b103903e79c895a76db59c14fa))
|
||||||
|
- Remove build-deb.yml workflow file - ([0d26c42](https://git.0xmax42.io/maxp/hdlbuild/commit/0d26c42f8ae419d509aa47d3f7a23bfdd08cf79b))
|
||||||
|
|
||||||
|
|
||||||
168
README.md
168
README.md
@@ -1,2 +1,168 @@
|
|||||||
# hdlbuild
|
# HDLBuild
|
||||||
|
|
||||||
|
HDLBuild is a flexible build management tool for FPGA projects. It simplifies the process of managing dependencies, building, testing, and deploying FPGA designs using Xilinx ISE tools.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Dependency Management**: Automatically resolves and manages project dependencies from Git repositories.
|
||||||
|
- **Build Automation**: Supports synthesis, implementation, and bitstream generation for FPGA designs.
|
||||||
|
- **Testbench Execution**: Automates the process of building and running testbenches.
|
||||||
|
- **Customizable Tool Options**: Provides extensive configuration options for Xilinx ISE tools.
|
||||||
|
- **Project Initialization**: Quickly set up new projects with predefined templates.
|
||||||
|
- **Rich Console Output**: Provides detailed and interactive console feedback using `rich`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- Python 3.10 or higher
|
||||||
|
- Poetry (for dependency management)
|
||||||
|
- Xilinx ISE (14.7) installed and configured
|
||||||
|
|
||||||
|
### Steps
|
||||||
|
|
||||||
|
1. Clone the repository:
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/your-repo/hdlbuild.git
|
||||||
|
cd hdlbuild
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Install dependencies:
|
||||||
|
```bash
|
||||||
|
poetry install
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Add the `hdlbuild` CLI to your PATH:
|
||||||
|
```bash
|
||||||
|
poetry shell
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### CLI Commands
|
||||||
|
|
||||||
|
HDLBuild provides a command-line interface (CLI) for managing FPGA projects. Below are the available commands:
|
||||||
|
|
||||||
|
#### 1. **Initialize a New Project**
|
||||||
|
```bash
|
||||||
|
hdlbuild init
|
||||||
|
```
|
||||||
|
- Creates a new project with a project.yml configuration file and a .gitignore file.
|
||||||
|
|
||||||
|
#### 2. **Resolve Dependencies**
|
||||||
|
```bash
|
||||||
|
hdlbuild dep
|
||||||
|
```
|
||||||
|
- Clones and resolves all project dependencies defined in project.yml.
|
||||||
|
|
||||||
|
#### 3. **Build the Project**
|
||||||
|
```bash
|
||||||
|
hdlbuild build
|
||||||
|
```
|
||||||
|
- Runs the full build process, including synthesis, implementation, and bitstream generation.
|
||||||
|
|
||||||
|
- To only synthesize the design:
|
||||||
|
```bash
|
||||||
|
hdlbuild build synth
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 4. **Run Testbenches**
|
||||||
|
```bash
|
||||||
|
hdlbuild test <testbench_name>
|
||||||
|
```
|
||||||
|
- Builds and runs the specified testbench.
|
||||||
|
|
||||||
|
#### 5. **Clean Build Artifacts**
|
||||||
|
```bash
|
||||||
|
hdlbuild clean
|
||||||
|
```
|
||||||
|
- Removes build artifacts.
|
||||||
|
|
||||||
|
- To clean all generated files:
|
||||||
|
```bash
|
||||||
|
hdlbuild clean all
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
The project is configured using a project.yml file. Below is an example configuration:
|
||||||
|
|
||||||
|
```yml
|
||||||
|
name: MyFPGAProject
|
||||||
|
topmodule: top_module
|
||||||
|
target_device: xc3s1200e-4-fg320
|
||||||
|
xilinx_path: /opt/Xilinx/14.7/ISE_DS/ISE
|
||||||
|
|
||||||
|
constraints: constraints.ucf
|
||||||
|
|
||||||
|
sources:
|
||||||
|
vhdl:
|
||||||
|
- path: src/*.vhd
|
||||||
|
library: work
|
||||||
|
|
||||||
|
testbenches:
|
||||||
|
vhdl:
|
||||||
|
- path: tests/*.vhd
|
||||||
|
library: work
|
||||||
|
|
||||||
|
dependencies:
|
||||||
|
- git: "https://github.com/example/dependency.git"
|
||||||
|
rev: "main"
|
||||||
|
|
||||||
|
build:
|
||||||
|
build_dir: working
|
||||||
|
report_dir: reports
|
||||||
|
copy_target_dir: output
|
||||||
|
|
||||||
|
tool_options:
|
||||||
|
xst:
|
||||||
|
- "-opt_mode Speed"
|
||||||
|
- "-opt_level 2"
|
||||||
|
map:
|
||||||
|
- "-detail"
|
||||||
|
- "-timing"
|
||||||
|
par: []
|
||||||
|
bitgen:
|
||||||
|
- "-g StartupClk:JtagClk"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
### Building the Package
|
||||||
|
|
||||||
|
To build the Python package:
|
||||||
|
```bash
|
||||||
|
poetry build
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## GitHub Actions
|
||||||
|
|
||||||
|
The project includes GitHub workflows for building and deploying the package:
|
||||||
|
|
||||||
|
1. **Build and Publish**: build-and-deploy.yml
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
This project is licensed under the MIT License.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Contributions are welcome! Please follow these steps:
|
||||||
|
|
||||||
|
1. Fork the repository.
|
||||||
|
2. Create a new branch for your feature or bugfix.
|
||||||
|
3. Submit a pull request.
|
||||||
|
|||||||
104
cliff.toml
Normal file
104
cliff.toml
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
# CLIFF_VERSION=2.8.0
|
||||||
|
# git-cliff ~ default configuration file
|
||||||
|
# https://git-cliff.org/docs/configuration
|
||||||
|
#
|
||||||
|
# Lines starting with "#" are comments.
|
||||||
|
# Configuration options are organized into tables and keys.
|
||||||
|
# See documentation for more information on available options.
|
||||||
|
[remote.gitea]
|
||||||
|
owner = "maxp"
|
||||||
|
repo = "hdlbuild"
|
||||||
|
|
||||||
|
[changelog]
|
||||||
|
# postprocessors
|
||||||
|
postprocessors = [
|
||||||
|
{ pattern = '<GITEA_URL>', replace = "https://git.0xmax42.io" }, # replace gitea url
|
||||||
|
]
|
||||||
|
|
||||||
|
# template for the changelog header
|
||||||
|
header = """
|
||||||
|
# Changelog\n
|
||||||
|
All notable changes to this project will be documented in this file.\n
|
||||||
|
"""
|
||||||
|
# template for the changelog body
|
||||||
|
# https://keats.github.io/tera/docs/#introduction
|
||||||
|
body = """
|
||||||
|
{%- macro remote_url() -%}
|
||||||
|
<GITEA_URL>/{{ remote.gitea.owner }}/{{ remote.gitea.repo }}
|
||||||
|
{%- endmacro -%}
|
||||||
|
|
||||||
|
{% if version %}\
|
||||||
|
{% if previous.version %}\
|
||||||
|
## [{{ version | trim_start_matches(pat="v") }}]\
|
||||||
|
({{ self::remote_url() }}/compare/{{ previous.version }}..{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||||
|
{% else %}\
|
||||||
|
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||||
|
{% endif %}\
|
||||||
|
{% else %}\
|
||||||
|
## [unreleased]
|
||||||
|
{% endif %}\
|
||||||
|
{% for group, commits in commits | group_by(attribute="group") %}
|
||||||
|
### {{ group | striptags | trim | upper_first }}
|
||||||
|
{% for commit in commits %}
|
||||||
|
- {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
|
||||||
|
{% if commit.breaking %}[**breaking**] {% endif %}\
|
||||||
|
{{ commit.message | upper_first }} - \
|
||||||
|
([{{ commit.id | truncate(length=7, end="") }}]({{ self::remote_url() }}/commit/{{ commit.id }}))\
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}\n
|
||||||
|
"""
|
||||||
|
# template for the changelog footer
|
||||||
|
footer = """
|
||||||
|
|
||||||
|
"""
|
||||||
|
# remove the leading and trailing s
|
||||||
|
trim = true
|
||||||
|
|
||||||
|
# render body even when there are no releases to process
|
||||||
|
# render_always = true
|
||||||
|
# output file path
|
||||||
|
# output = "test.md"
|
||||||
|
|
||||||
|
[git]
|
||||||
|
# parse the commits based on https://www.conventionalcommits.org
|
||||||
|
conventional_commits = true
|
||||||
|
# filter out the commits that are not conventional
|
||||||
|
filter_unconventional = true
|
||||||
|
# process each line of a commit as an individual commit
|
||||||
|
split_commits = false
|
||||||
|
# regex for preprocessing the commit messages
|
||||||
|
commit_preprocessors = [
|
||||||
|
# Replace issue numbers
|
||||||
|
#{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"},
|
||||||
|
# Check spelling of the commit with https://github.com/crate-ci/typos
|
||||||
|
# If the spelling is incorrect, it will be automatically fixed.
|
||||||
|
#{ pattern = '.*', replace_command = 'typos --write-changes -' },
|
||||||
|
]
|
||||||
|
# regex for parsing and grouping commits
|
||||||
|
commit_parsers = [
|
||||||
|
{ message = "^feat", group = "<!-- 0 -->🚀 Features" },
|
||||||
|
{ message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
|
||||||
|
{ message = "^doc", group = "<!-- 3 -->📚 Documentation" },
|
||||||
|
{ message = "^perf", group = "<!-- 4 -->⚡ Performance" },
|
||||||
|
{ message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
|
||||||
|
{ message = "^style", group = "<!-- 5 -->🎨 Styling" },
|
||||||
|
{ message = "^test", group = "<!-- 6 -->🧪 Testing" },
|
||||||
|
{ message = "^chore\\(changelog\\)", skip = true },
|
||||||
|
{ message = "^chore\\(version\\)", skip = true },
|
||||||
|
{ message = "^chore\\(release\\): prepare for", skip = true },
|
||||||
|
{ message = "^chore\\(deps.*\\)", skip = true },
|
||||||
|
{ message = "^chore\\(pr\\)", skip = true },
|
||||||
|
{ message = "^chore\\(pull\\)", skip = true },
|
||||||
|
{ message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
|
||||||
|
{ body = ".*security", group = "<!-- 8 -->🛡️ Security" },
|
||||||
|
{ message = "^revert", group = "<!-- 9 -->◀️ Revert" },
|
||||||
|
{ message = ".*", group = "<!-- 10 -->💼 Other" },
|
||||||
|
]
|
||||||
|
# Regex to select git tags that represent releases.
|
||||||
|
tag_pattern = "v[0-9]+\\.[0-9]+\\.[0-9]+"
|
||||||
|
# filter out the commits that are not matched by commit parsers
|
||||||
|
filter_commits = false
|
||||||
|
# sort the tags topologically
|
||||||
|
topo_order = false
|
||||||
|
# sort the commits inside sections by oldest/newest order
|
||||||
|
sort_commits = "newest"
|
||||||
268
project.example.yml
Normal file
268
project.example.yml
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
name:
|
||||||
|
topmodule:
|
||||||
|
target_device: xc3s1200e-4-fg320
|
||||||
|
xilinx_path: /opt/Xilinx/14.7/ISE_DS/ISE
|
||||||
|
|
||||||
|
constraints:
|
||||||
|
|
||||||
|
sources:
|
||||||
|
vhdl:
|
||||||
|
- path: src/*.vhd
|
||||||
|
library: work
|
||||||
|
|
||||||
|
testbenches:
|
||||||
|
vhdl:
|
||||||
|
- path: tests/*.vhd
|
||||||
|
library: work
|
||||||
|
|
||||||
|
dependencies:
|
||||||
|
# - git: "https://git.0xmax42.io/maxp/Asynchronous-FIFO-AXI-Handshake.git"
|
||||||
|
# rev: "hdlbuild"
|
||||||
|
|
||||||
|
build:
|
||||||
|
build_dir: working
|
||||||
|
report_dir: reports
|
||||||
|
copy_target_dir: output
|
||||||
|
|
||||||
|
# Tool Optionen
|
||||||
|
tool_options:
|
||||||
|
common:
|
||||||
|
- "-intstyle"
|
||||||
|
- "xflow"
|
||||||
|
|
||||||
|
ngdbuild: []
|
||||||
|
|
||||||
|
map:
|
||||||
|
- "-detail"
|
||||||
|
- "-timing"
|
||||||
|
- "-ol"
|
||||||
|
- "high"
|
||||||
|
|
||||||
|
par: []
|
||||||
|
|
||||||
|
bitgen:
|
||||||
|
- "-g"
|
||||||
|
- "StartupClk:JtagClk"
|
||||||
|
|
||||||
|
trace:
|
||||||
|
- "-v"
|
||||||
|
- "3"
|
||||||
|
- "-n"
|
||||||
|
- "3"
|
||||||
|
|
||||||
|
fuse:
|
||||||
|
- "-incremental"
|
||||||
|
|
||||||
|
isim:
|
||||||
|
- "-gui"
|
||||||
|
|
||||||
|
xst:
|
||||||
|
# Optimization goal: prioritize speed or area.
|
||||||
|
# Values: Speed | Area
|
||||||
|
- "-opt_mode Speed"
|
||||||
|
|
||||||
|
# Optimization level: more aggressive optimizations at level 2.
|
||||||
|
# Values: 1 | 2
|
||||||
|
- "-opt_level 2"
|
||||||
|
|
||||||
|
# Use the new XST parser (recommended for modern designs).
|
||||||
|
# Values: yes | no
|
||||||
|
- "-use_new_parser yes"
|
||||||
|
|
||||||
|
# Preserve design hierarchy or allow flattening for optimization.
|
||||||
|
# Values: Yes | No | Soft
|
||||||
|
- "-keep_hierarchy No"
|
||||||
|
|
||||||
|
# Determines how hierarchy is preserved in the netlist.
|
||||||
|
# Values: As_Optimized | Rebuilt
|
||||||
|
- "-netlist_hierarchy As_Optimized"
|
||||||
|
|
||||||
|
# Global optimization strategy for nets.
|
||||||
|
# Values: AllClockNets | Offset_In_Before | Offset_Out_After | Inpad_To_Outpad | Max_Delay
|
||||||
|
- "-glob_opt AllClockNets"
|
||||||
|
|
||||||
|
## Misc ##
|
||||||
|
|
||||||
|
# Enable reading of IP cores.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-read_cores YES"
|
||||||
|
|
||||||
|
# Do not write timing constraints into synthesis report.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-write_timing_constraints NO"
|
||||||
|
|
||||||
|
# Analyze paths across different clock domains.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-cross_clock_analysis NO"
|
||||||
|
|
||||||
|
# Character used to separate hierarchy levels in instance names.
|
||||||
|
# Default: /
|
||||||
|
- "-hierarchy_separator /"
|
||||||
|
|
||||||
|
# Delimiters used for bus signals.
|
||||||
|
# Values: <> | [] | () | {}
|
||||||
|
- "-bus_delimiter <>"
|
||||||
|
|
||||||
|
# Maintain original case of identifiers.
|
||||||
|
# Values: Maintain | Upper | Lower
|
||||||
|
- "-case Maintain"
|
||||||
|
|
||||||
|
# Target maximum utilization ratio for slices.
|
||||||
|
# Values: 1–100
|
||||||
|
- "-slice_utilization_ratio 100"
|
||||||
|
|
||||||
|
# Target maximum utilization ratio for BRAMs.
|
||||||
|
# Values: 1–100
|
||||||
|
- "-bram_utilization_ratio 100"
|
||||||
|
|
||||||
|
# Use Verilog 2001 syntax features.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-verilog2001 YES"
|
||||||
|
|
||||||
|
#### HDL Options ####
|
||||||
|
|
||||||
|
## FSM ##
|
||||||
|
|
||||||
|
# Extract FSMs (Finite State Machines) from HDL code.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-fsm_extract YES"
|
||||||
|
|
||||||
|
# Encoding strategy for FSMs.
|
||||||
|
# Values: Auto | Gray | One-Hot | Johnson | Compact | Sequential | Speed1 | User
|
||||||
|
- "-fsm_encoding Auto"
|
||||||
|
|
||||||
|
# Add safe logic for undefined FSM states.
|
||||||
|
# Values: Yes | No
|
||||||
|
- "-safe_implementation No"
|
||||||
|
|
||||||
|
# Structure used to implement FSMs.
|
||||||
|
# Values: LUT | BRAM
|
||||||
|
- "-fsm_style LUT"
|
||||||
|
|
||||||
|
## RAM/ROM ##
|
||||||
|
|
||||||
|
# Extract RAM inference from HDL.
|
||||||
|
# Values: Yes | No
|
||||||
|
- "-ram_extract Yes"
|
||||||
|
|
||||||
|
# Style used to implement RAM.
|
||||||
|
# Values: Auto | Block | Distributed
|
||||||
|
- "-ram_style Auto"
|
||||||
|
|
||||||
|
# Extract ROM inference from HDL.
|
||||||
|
# Values: Yes | No
|
||||||
|
- "-rom_extract Yes"
|
||||||
|
|
||||||
|
# Style used for implementing ROM.
|
||||||
|
# Values: Auto | Distributed | Block
|
||||||
|
- "-rom_style Auto"
|
||||||
|
|
||||||
|
# Enable or disable automatic BRAM packing.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-auto_bram_packing NO"
|
||||||
|
|
||||||
|
## MUX/Decoder/Shift Register ##
|
||||||
|
|
||||||
|
# Extract multiplexers where possible.
|
||||||
|
# Values: Yes | No | Force
|
||||||
|
- "-mux_extract Yes"
|
||||||
|
|
||||||
|
# Style used for implementing MUX logic.
|
||||||
|
# Values: Auto | MUXCY | MUXF
|
||||||
|
- "-mux_style Auto"
|
||||||
|
|
||||||
|
# Extract decoder logic from behavioral code.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-decoder_extract YES"
|
||||||
|
|
||||||
|
# Extract and optimize priority encoder structures.
|
||||||
|
# Values: Yes | No | Force
|
||||||
|
- "-priority_extract Yes"
|
||||||
|
|
||||||
|
# Extract shift register logic.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-shreg_extract YES"
|
||||||
|
|
||||||
|
# Extract simple shift operations into dedicated hardware.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-shift_extract YES"
|
||||||
|
|
||||||
|
## Multiplier ##
|
||||||
|
|
||||||
|
# Style for implementing multipliers.
|
||||||
|
# Values: Auto | LUT | Pipe_LUT | Pipe_Block | Block
|
||||||
|
- "-mult_style Auto"
|
||||||
|
|
||||||
|
## Misc ##
|
||||||
|
|
||||||
|
# Collapse XOR trees where beneficial.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-xor_collapse YES"
|
||||||
|
|
||||||
|
# Share resources like adders or multipliers between logic blocks.
|
||||||
|
# Values: YES | NO | Force
|
||||||
|
- "-resource_sharing YES"
|
||||||
|
|
||||||
|
# Convert asynchronous resets to synchronous where possible.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-async_to_sync NO"
|
||||||
|
|
||||||
|
#### Xilinx Specific Options ####
|
||||||
|
|
||||||
|
## Optimization ##
|
||||||
|
|
||||||
|
# Enable removal of logically equivalent registers.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-equivalent_register_removal YES"
|
||||||
|
|
||||||
|
# Duplicate registers to reduce fanout or improve timing.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-register_duplication YES"
|
||||||
|
|
||||||
|
# Move registers across logic to balance timing.
|
||||||
|
# Values: Yes | No | Forward | Backward
|
||||||
|
- "-register_balancing No"
|
||||||
|
|
||||||
|
# Use clock enable signals where possible.
|
||||||
|
# Values: Auto | Yes | No
|
||||||
|
- "-use_clock_enable Yes"
|
||||||
|
|
||||||
|
# Use synchronous set (preset) signals when available.
|
||||||
|
# Values: Auto | Yes | No
|
||||||
|
- "-use_sync_set Yes"
|
||||||
|
|
||||||
|
# Use synchronous reset signals where possible.
|
||||||
|
# Values: Auto | Yes | No
|
||||||
|
- "-use_sync_reset Yes"
|
||||||
|
|
||||||
|
## I/O ##
|
||||||
|
|
||||||
|
# Insert IO buffers for top-level ports.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-iobuf YES"
|
||||||
|
|
||||||
|
# Placement strategy for IOB registers (Auto = let tools decide).
|
||||||
|
# Values: Auto | YES | NO
|
||||||
|
- "-iob Auto"
|
||||||
|
|
||||||
|
## Misc ##
|
||||||
|
|
||||||
|
# Maximum allowed fanout for a net.
|
||||||
|
# Values: integer (e.g., 500)
|
||||||
|
- "-max_fanout 500"
|
||||||
|
|
||||||
|
# Maximum number of BUFGs (global buffers) to use.
|
||||||
|
# Values: 0–32 (device-dependent)
|
||||||
|
- "-bufg 24"
|
||||||
|
|
||||||
|
# Enable logic packing into slices.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-slice_packing YES"
|
||||||
|
|
||||||
|
# Try to reduce the number of primitive instances used.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-optimize_primitives NO"
|
||||||
|
|
||||||
|
# Margin in percent beyond the target slice utilization.
|
||||||
|
# Values: 0–100
|
||||||
|
- "-slice_utilization_ratio_maxmargin 5"
|
||||||
27
pyproject.toml
Normal file
27
pyproject.toml
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
[tool.poetry]
|
||||||
|
name = "hdlbuild"
|
||||||
|
version = "0.6.1"
|
||||||
|
description = "Flexible FPGA Build System"
|
||||||
|
authors = ["0xMax42 <Mail@0xMax42.io>"]
|
||||||
|
license = "MIT"
|
||||||
|
readme = "README.md"
|
||||||
|
packages = [{ include = "hdlbuild", from = "src" }]
|
||||||
|
include = ["src/hdlbuild/templates/*"]
|
||||||
|
|
||||||
|
[tool.poetry.scripts]
|
||||||
|
hdlbuild = "hdlbuild.cli:main"
|
||||||
|
|
||||||
|
[tool.poetry.dependencies]
|
||||||
|
python = "^3.10"
|
||||||
|
pyyaml = "^6.0.2"
|
||||||
|
pydantic = "^2.11.3"
|
||||||
|
rich = "^14.0.0"
|
||||||
|
gitpython = "^3.1.44"
|
||||||
|
typer = "^0.16.0"
|
||||||
|
|
||||||
|
[tool.poetry.group.dev.dependencies]
|
||||||
|
twine = "^6.1.0"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["poetry-core"]
|
||||||
|
build-backend = "poetry.core.masonry.api"
|
||||||
0
src/hdlbuild/__init__.py
Normal file
0
src/hdlbuild/__init__.py
Normal file
32
src/hdlbuild/cli.py
Normal file
32
src/hdlbuild/cli.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import typer
|
||||||
|
from importlib.metadata import version, PackageNotFoundError
|
||||||
|
|
||||||
|
from hdlbuild.commands.build import cli as build_cli
|
||||||
|
from hdlbuild.commands.clean import cli as clean_cli
|
||||||
|
from hdlbuild.commands.dep import cli as dep_cli
|
||||||
|
from hdlbuild.commands.test import cli as test_cli
|
||||||
|
from hdlbuild.commands.init import cli as init_cli
|
||||||
|
|
||||||
|
def get_version() -> str:
    """Return the installed hdlbuild package version, or "unknown" if the
    package metadata is not available (e.g. running from a raw checkout)."""
    try:
        pkg_version = version("hdlbuild")
    except PackageNotFoundError:
        return "unknown"
    return pkg_version
|
||||||
|
|
||||||
|
# Top-level Typer application; the help banner embeds the installed version.
app = typer.Typer(
    rich_help_panel="ℹ️ HDLBuild – FPGA‑Build‑Tool",
    help=f"hdlbuild v{get_version()} – Build‑Management for FPGA projects"
)

# Register sub-commands (equivalent to argparse's add_subparsers)
app.add_typer(build_cli, name="build", help="Build the project")
app.add_typer(clean_cli, name="clean", help="Clean build artifacts")
app.add_typer(dep_cli, name="dep", help="Resolve dependencies")
app.add_typer(test_cli, name="test", help="Run simulations/testbenches")
app.add_typer(init_cli, name="init", help="Initialize project")


def main():
    """Console-script entry point for the `hdlbuild` command."""
    app()


if __name__ == "__main__":
    main()
|
||||||
34
src/hdlbuild/commands/build.py
Normal file
34
src/hdlbuild/commands/build.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import typer
|
||||||
|
|
||||||
|
from hdlbuild.tools.xilinx_ise.main import xilinx_ise_all, xilinx_ise_synth
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||||
|
from hdlbuild.utils.directory_manager import ensure_directories_exist
|
||||||
|
from hdlbuild.utils.project_loader import load_project_config
|
||||||
|
|
||||||
|
cli = typer.Typer(rich_help_panel="🔨 Build Commands")
|
||||||
|
|
||||||
|
@cli.callback(invoke_without_command=True)
def build(
    target: str = typer.Argument(
        None,
        help="Optional: 'synth' to run synthesis only",
        show_default=False,
        rich_help_panel="🔨 Build Commands",
    )
) -> None:
    """
    Run the full build flow or synthesis only.

    * `hdlbuild build` → full flow
    * `hdlbuild build synth` → synthesis only
    """
    log = ConsoleUtils("hdlbuild")
    cfg = load_project_config()

    ensure_directories_exist(True)

    if target == "synth":
        log.print("Starting synthesis …")
        xilinx_ise_synth(cfg)
        return

    log.print("Starting full build …")
    xilinx_ise_all(cfg)
|
||||||
35
src/hdlbuild/commands/clean.py
Normal file
35
src/hdlbuild/commands/clean.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import typer
|
||||||
|
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||||
|
from hdlbuild.utils.directory_manager import clear_build_directories, clear_directories
|
||||||
|
|
||||||
|
cli = typer.Typer(rich_help_panel="🧹 Clean Commands")
|
||||||
|
|
||||||
|
@cli.callback(invoke_without_command=True)
def clean(
    target: str = typer.Argument(
        None,
        help="Optional: 'all' → wipe *all* artefacts, otherwise only the build directory",
        show_default=False,
    )
) -> None:
    """
    Remove build artefacts (`build/*`) or *everything* (`all`).

    Examples
    --------
    ```bash
    hdlbuild clean       # build/* and temporary files only
    hdlbuild clean all   # also caches, logs, etc.
    ```
    """
    log = ConsoleUtils("hdlbuild")

    if target != "all":
        log.print("Removing build artefacts …")
        clear_build_directories()
        log.print("Build artefacts removed.")
        return

    log.print("Starting clean‑all …")
    clear_directories()
    log.print("All artefacts removed.")
|
||||||
23
src/hdlbuild/commands/dep.py
Normal file
23
src/hdlbuild/commands/dep.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
import typer
|
||||||
|
|
||||||
|
from hdlbuild.dependencies.resolver import DependencyResolver
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||||
|
from hdlbuild.utils.project_loader import load_project_config
|
||||||
|
|
||||||
|
cli = typer.Typer(rich_help_panel="🔗 Dependency Commands")
|
||||||
|
|
||||||
|
@cli.callback(invoke_without_command=True)
def dep() -> None:
    """
    Resolve all project dependencies.

    ```bash
    hdlbuild dep
    ```
    """
    log = ConsoleUtils("hdlbuild")
    cfg = load_project_config()

    log.print("Resolving dependencies …")
    resolver = DependencyResolver(cfg)
    resolver.resolve_all()
    log.print("Dependencies resolved.")
|
||||||
35
src/hdlbuild/commands/init.py
Normal file
35
src/hdlbuild/commands/init.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||||
|
|
||||||
|
cli = typer.Typer(rich_help_panel="🆕 Init Commands")
|
||||||
|
|
||||||
|
@cli.callback(invoke_without_command=True)
def init() -> None:
    """
    Initialise a new HDLBuild project in the current directory.

    Copies `.gitignore` and `project.yml` from the template folder.
    """
    log = ConsoleUtils("hdlbuild")
    project_dir = Path.cwd()

    # Templates ship inside the package, one level above this module.
    template_dir = (Path(__file__).parent / ".." / "templates").resolve()

    mapping = {
        "gitignore.template": ".gitignore",
        "project.yml.template": "project.yml",
    }

    for template_name, target_name in mapping.items():
        destination = project_dir / target_name
        if destination.exists():
            log.print(f"{target_name} already exists – skipping.")
        else:
            shutil.copy(template_dir / template_name, destination)
            log.print(f"Created {target_name}")
||||||
31
src/hdlbuild/commands/test.py
Normal file
31
src/hdlbuild/commands/test.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
import typer
|
||||||
|
|
||||||
|
from hdlbuild.tools.xilinx_ise.isim import build_testbench, run_testbench
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||||
|
from hdlbuild.utils.project_loader import load_project_config
|
||||||
|
|
||||||
|
cli = typer.Typer(rich_help_panel="🧪 Test Commands")
|
||||||
|
|
||||||
|
@cli.callback(invoke_without_command=True)
def test(
    target: str = typer.Argument(
        None,
        help="Name of the test target (leave empty to run all)",
        show_default=False,
    )
) -> None:
    """
    Build and run testbenches.

    ```bash
    hdlbuild test        # run all TBs
    hdlbuild test alu    # run TB 'alu' only
    ```
    """
    log = ConsoleUtils("hdlbuild")
    cfg = load_project_config()

    log.print("Starting test flow …")
    # NOTE(review): with no argument, `None` is forwarded as the testbench
    # name — presumably build/run_testbench treat it as "all"; confirm there.
    build_testbench(cfg, target)
    run_testbench(cfg, target)
    log.print("Tests finished.")
|
||||||
99
src/hdlbuild/dependencies/resolver.py
Normal file
99
src/hdlbuild/dependencies/resolver.py
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
# src/hdlbuild/dependency/resolver.py
|
||||||
|
|
||||||
|
from typing import List, Set
|
||||||
|
|
||||||
|
from git import Repo
|
||||||
|
from hdlbuild.models.config import DIRECTORIES, GIT
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.dependency import ResolvedDependency
|
||||||
|
import os
|
||||||
|
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||||
|
from hdlbuild.utils.project_loader import load_project_config
|
||||||
|
|
||||||
|
class DependencyResolver:
    """Recursively resolves a project's git dependencies into a local cache."""

    def __init__(self, root_project: ProjectConfig, offline_mode: bool = False):
        self.root_project = root_project
        # offline_mode=True: never touch the network; local clones must already exist.
        self.offline_mode = offline_mode
        self.resolved: List[ResolvedDependency] = []
        # Git URLs already processed — prevents duplicate clones and dependency cycles.
        self.visited_urls: Set[str] = set()
        self.console = ConsoleUtils(live=True)
        self.console.start_live()

    def resolve_all(self):
        """Start resolving all dependencies (recursively)."""
        self._resolve_project(self.root_project)
        self.console.stop_live("[bold green]Alle Abhängigkeiten aufgelöst.[/bold green]")

    def _resolve_project(self, project: ProjectConfig):
        """Resolve the dependencies of a single project (depth-first)."""
        for dep in project.dependencies or []:
            if dep.git in self.visited_urls:
                continue

            self.visited_urls.add(dep.git)

            local_path = self._clone_or_use_existing(dep.git, dep.rev)
            dep_project = self._load_project_config(os.path.join(local_path, "project.yml"))

            # Record the dependency together with its on-disk location.
            self.resolved.append(ResolvedDependency(project=dep_project, local_path=local_path))

            self._resolve_project(dep_project)

    def _clone_or_use_existing(self, git_url: str, rev: str) -> str:
        """
        Ensure a local clone of *git_url* exists and is checked out at *rev*.

        Returns:
            str: Path of the local clone below DIRECTORIES.dependency.

        Raises:
            FileNotFoundError: In offline mode, when the clone is missing.
        """
        folder_name = os.path.basename(git_url.rstrip("/")).replace(".git", "")
        local_path = os.path.join(DIRECTORIES.dependency, folder_name)

        if os.path.exists(local_path):
            # Local repository already present — reuse it.
            self.console.print(f"[bold green]Benutze vorhandenes Repository: {folder_name}[/bold green]")
            repo = Repo(local_path)

            if not self.offline_mode:
                try:
                    self.console.print(f"[bold green]Aktualisiere {folder_name}...[/bold green]")

                    # Fetch remote updates.
                    repo.remotes.origin.fetch()

                    # Check whether HEAD and origin/<branch> diverge.
                    local_commit = repo.head.commit
                    remote_ref = repo.remotes.origin.refs[repo.active_branch.name]
                    remote_commit = remote_ref.commit

                    if local_commit.hexsha != remote_commit.hexsha:
                        self.console.print(f"[bold yellow]Änderungen erkannt! Force-Pull wird durchgeführt...[/bold yellow]")
                        # Hard reset: any local modifications in the cache are discarded.
                        repo.git.reset('--hard', remote_commit.hexsha)
                    else:
                        self.console.print(f"[bold green]Repository {folder_name} ist aktuell.[/bold green]")

                except Exception as e:
                    # Best effort: an update failure falls back to the existing clone.
                    self.console.print(f"[bold red]Warnung beim Aktualisieren: {e}[/bold red]")

        else:
            # Local repository missing → cloning is required.
            if self.offline_mode:
                raise FileNotFoundError(f"Repository {folder_name} existiert lokal nicht und offline_mode ist aktiv.")
            else:
                self.console.print(f"[bold green]Klone {git_url}...[/bold green]")
                repo = Repo.clone_from(git_url, local_path)

        # Always switch to the requested commit/branch/tag.
        self.console.print(f"[bold green]Checkout auf[/bold green] [yellow]{rev}[/yellow] in {folder_name}")
        repo.git.checkout(rev)

        return local_path

    def _load_project_config(self, path: str) -> ProjectConfig:
        """
        Load the project.yml of a resolved dependency.

        Args:
            path (str): Path handed straight to load_project_config — the
                caller passes "<clone>/project.yml".

        Returns:
            ProjectConfig: The loaded project.
        """
        self.console.print(f"Lade project.yml aus {path}...")
        return load_project_config(path)
||||||
26
src/hdlbuild/models/config.py
Normal file
26
src/hdlbuild/models/config.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import os
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
class DirectoryConfig(BaseModel):
    """Well-known hdlbuild working directories, relative to the project root."""
    # Cache directory for cloned git dependencies.
    dependency: str = ".hdlbuild_deps"
    # Scratch directory the ISE tools run in.
    build: str = ".working"
    # Destination for copied tool reports.
    report: str = "reports"
    # Destination for final build outputs (e.g. bitstreams).
    copy_target: str = "output"

    def get_relative_prefix(self) -> str:
        """
        Return the relative path from the build directory back to the
        project root.

        Example:
            ".working"     -> "../"
            ".build/deep"  -> "../../"
        """
        # One "../" per path component of the build directory.
        depth = len(os.path.normpath(self.build).split(os.sep))
        return "../" * depth


# Module-level singleton used throughout the tool wrappers.
DIRECTORIES = DirectoryConfig()


class GitConfig(BaseModel):
    # Timeout in seconds for git operations.
    timeout: int = 10


GIT = GitConfig()
|
||||||
8
src/hdlbuild/models/dependency.py
Normal file
8
src/hdlbuild/models/dependency.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# models/dependency.py
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
|
||||||
|
class ResolvedDependency(BaseModel):
    """A dependency whose repository has been materialised on disk."""
    # Parsed project.yml of the dependency.
    project: ProjectConfig
    # Filesystem path of the local clone.
    local_path: str
|
||||||
48
src/hdlbuild/models/project.py
Normal file
48
src/hdlbuild/models/project.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
class SourceFile(BaseModel):
    """A single HDL source entry (path may contain glob patterns)."""
    path: str
    library: str = "work"  # defaults to library 'work'


class ToolOptions(BaseModel):
    """Extra command-line options per ISE tool; `common` applies to all tools."""
    common: List[str] = Field(default_factory=list)
    xst: List[str] = Field(default_factory=list)
    ngdbuild: List[str] = Field(default_factory=list)
    map: List[str] = Field(default_factory=list)
    par: List[str] = Field(default_factory=list)
    bitgen: List[str] = Field(default_factory=list)
    trace: List[str] = Field(default_factory=list)
    fuse: List[str] = Field(default_factory=list)
    isim: List[str] = Field(default_factory=list)


class Dependency(BaseModel):
    """A git-hosted hdlbuild project this project depends on."""
    name: Optional[str] = None  # name is optional
    git: str  # clone URL
    rev: str  # branch, tag or commit to check out
    library: str = "work"  # defaults to library 'work'


class Sources(BaseModel):
    """Design sources, grouped by language."""
    vhdl: List[SourceFile] = Field(default_factory=list)
    verilog: List[SourceFile] = Field(default_factory=list)


class Testbenches(BaseModel):
    """Simulation-only sources, grouped by language."""
    vhdl: List[SourceFile] = Field(default_factory=list)
    verilog: List[SourceFile] = Field(default_factory=list)


class BuildOptions(BaseModel):
    """Optional overrides for the working/report/output directory names."""
    build_dir: Optional[str] = "working"
    report_dir: Optional[str] = "reports"
    copy_target_dir: Optional[str] = "output"


class ProjectConfig(BaseModel):
    """Root model for a project.yml file."""
    name: str
    # Required key, but its value may be null (the template ships it empty).
    topmodule: Optional[str]
    target_device: str
    # Root of the Xilinx ISE installation (used to locate the tool binaries).
    xilinx_path: str
    sources: Sources
    testbenches: Optional[Testbenches] = None
    # Presumably the path to a constraints (UCF) file — TODO confirm usage.
    constraints: Optional[str] = None
    build: Optional[BuildOptions] = None
    dependencies: Optional[List[Dependency]] = Field(default_factory=list)
    tool_options: Optional[ToolOptions] = ToolOptions()
||||||
6
src/hdlbuild/templates/gitignore.template
Normal file
6
src/hdlbuild/templates/gitignore.template
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
.hdlbuild_deps/
|
||||||
|
.working/
|
||||||
|
reports/
|
||||||
|
output/
|
||||||
|
.locale/
|
||||||
|
vhdl_ls.toml
|
||||||
268
src/hdlbuild/templates/project.yml.template
Normal file
268
src/hdlbuild/templates/project.yml.template
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
name:
|
||||||
|
topmodule:
|
||||||
|
target_device: xc3s1200e-4-fg320
|
||||||
|
xilinx_path: /opt/Xilinx/14.7/ISE_DS/ISE
|
||||||
|
|
||||||
|
constraints:
|
||||||
|
|
||||||
|
sources:
|
||||||
|
vhdl:
|
||||||
|
- path: src/*.vhd
|
||||||
|
library: work
|
||||||
|
|
||||||
|
testbenches:
|
||||||
|
vhdl:
|
||||||
|
- path: tests/*.vhd
|
||||||
|
library: work
|
||||||
|
|
||||||
|
dependencies:
|
||||||
|
# - git: "https://git.0xmax42.io/maxp/Asynchronous-FIFO-AXI-Handshake.git"
|
||||||
|
# rev: "hdlbuild"
|
||||||
|
|
||||||
|
build:
|
||||||
|
build_dir: working
|
||||||
|
report_dir: reports
|
||||||
|
copy_target_dir: output
|
||||||
|
|
||||||
|
# Tool Optionen
|
||||||
|
tool_options:
|
||||||
|
common:
|
||||||
|
- "-intstyle"
|
||||||
|
- "xflow"
|
||||||
|
|
||||||
|
ngdbuild: []
|
||||||
|
|
||||||
|
map:
|
||||||
|
- "-detail"
|
||||||
|
- "-timing"
|
||||||
|
- "-ol"
|
||||||
|
- "high"
|
||||||
|
|
||||||
|
par: []
|
||||||
|
|
||||||
|
bitgen:
|
||||||
|
- "-g"
|
||||||
|
- "StartupClk:JtagClk"
|
||||||
|
|
||||||
|
trace:
|
||||||
|
- "-v"
|
||||||
|
- "3"
|
||||||
|
- "-n"
|
||||||
|
- "3"
|
||||||
|
|
||||||
|
fuse:
|
||||||
|
- "-incremental"
|
||||||
|
|
||||||
|
isim:
|
||||||
|
- "-gui"
|
||||||
|
|
||||||
|
xst:
|
||||||
|
# Optimization goal: prioritize speed or area.
|
||||||
|
# Values: Speed | Area
|
||||||
|
- "-opt_mode Speed"
|
||||||
|
|
||||||
|
# Optimization level: more aggressive optimizations at level 2.
|
||||||
|
# Values: 1 | 2
|
||||||
|
- "-opt_level 2"
|
||||||
|
|
||||||
|
# Use the new XST parser (recommended for modern designs).
|
||||||
|
# Values: yes | no
|
||||||
|
- "-use_new_parser yes"
|
||||||
|
|
||||||
|
# Preserve design hierarchy or allow flattening for optimization.
|
||||||
|
# Values: Yes | No | Soft
|
||||||
|
- "-keep_hierarchy No"
|
||||||
|
|
||||||
|
# Determines how hierarchy is preserved in the netlist.
|
||||||
|
# Values: As_Optimized | Rebuilt
|
||||||
|
- "-netlist_hierarchy As_Optimized"
|
||||||
|
|
||||||
|
# Global optimization strategy for nets.
|
||||||
|
# Values: AllClockNets | Offset_In_Before | Offset_Out_After | Inpad_To_Outpad | Max_Delay
|
||||||
|
- "-glob_opt AllClockNets"
|
||||||
|
|
||||||
|
## Misc ##
|
||||||
|
|
||||||
|
# Enable reading of IP cores.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-read_cores YES"
|
||||||
|
|
||||||
|
# Do not write timing constraints into synthesis report.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-write_timing_constraints NO"
|
||||||
|
|
||||||
|
# Analyze paths across different clock domains.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-cross_clock_analysis NO"
|
||||||
|
|
||||||
|
# Character used to separate hierarchy levels in instance names.
|
||||||
|
# Default: /
|
||||||
|
- "-hierarchy_separator /"
|
||||||
|
|
||||||
|
# Delimiters used for bus signals.
|
||||||
|
# Values: <> | [] | () | {}
|
||||||
|
- "-bus_delimiter <>"
|
||||||
|
|
||||||
|
# Maintain original case of identifiers.
|
||||||
|
# Values: Maintain | Upper | Lower
|
||||||
|
- "-case Maintain"
|
||||||
|
|
||||||
|
# Target maximum utilization ratio for slices.
|
||||||
|
# Values: 1–100
|
||||||
|
- "-slice_utilization_ratio 100"
|
||||||
|
|
||||||
|
# Target maximum utilization ratio for BRAMs.
|
||||||
|
# Values: 1–100
|
||||||
|
- "-bram_utilization_ratio 100"
|
||||||
|
|
||||||
|
# Use Verilog 2001 syntax features.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-verilog2001 YES"
|
||||||
|
|
||||||
|
#### HDL Options ####
|
||||||
|
|
||||||
|
## FSM ##
|
||||||
|
|
||||||
|
# Extract FSMs (Finite State Machines) from HDL code.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-fsm_extract YES"
|
||||||
|
|
||||||
|
# Encoding strategy for FSMs.
|
||||||
|
# Values: Auto | Gray | One-Hot | Johnson | Compact | Sequential | Speed1 | User
|
||||||
|
- "-fsm_encoding Auto"
|
||||||
|
|
||||||
|
# Add safe logic for undefined FSM states.
|
||||||
|
# Values: Yes | No
|
||||||
|
- "-safe_implementation No"
|
||||||
|
|
||||||
|
# Structure used to implement FSMs.
|
||||||
|
# Values: LUT | BRAM
|
||||||
|
- "-fsm_style LUT"
|
||||||
|
|
||||||
|
## RAM/ROM ##
|
||||||
|
|
||||||
|
# Extract RAM inference from HDL.
|
||||||
|
# Values: Yes | No
|
||||||
|
- "-ram_extract Yes"
|
||||||
|
|
||||||
|
# Style used to implement RAM.
|
||||||
|
# Values: Auto | Block | Distributed
|
||||||
|
- "-ram_style Auto"
|
||||||
|
|
||||||
|
# Extract ROM inference from HDL.
|
||||||
|
# Values: Yes | No
|
||||||
|
- "-rom_extract Yes"
|
||||||
|
|
||||||
|
# Style used for implementing ROM.
|
||||||
|
# Values: Auto | Distributed | Block
|
||||||
|
- "-rom_style Auto"
|
||||||
|
|
||||||
|
# Enable or disable automatic BRAM packing.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-auto_bram_packing NO"
|
||||||
|
|
||||||
|
## MUX/Decoder/Shift Register ##
|
||||||
|
|
||||||
|
# Extract multiplexers where possible.
|
||||||
|
# Values: Yes | No | Force
|
||||||
|
- "-mux_extract Yes"
|
||||||
|
|
||||||
|
# Style used for implementing MUX logic.
|
||||||
|
# Values: Auto | MUXCY | MUXF
|
||||||
|
- "-mux_style Auto"
|
||||||
|
|
||||||
|
# Extract decoder logic from behavioral code.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-decoder_extract YES"
|
||||||
|
|
||||||
|
# Extract and optimize priority encoder structures.
|
||||||
|
# Values: Yes | No | Force
|
||||||
|
- "-priority_extract Yes"
|
||||||
|
|
||||||
|
# Extract shift register logic.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-shreg_extract YES"
|
||||||
|
|
||||||
|
# Extract simple shift operations into dedicated hardware.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-shift_extract YES"
|
||||||
|
|
||||||
|
## Multiplier ##
|
||||||
|
|
||||||
|
# Style for implementing multipliers.
|
||||||
|
# Values: Auto | LUT | Pipe_LUT | Pipe_Block | Block
|
||||||
|
- "-mult_style Auto"
|
||||||
|
|
||||||
|
## Misc ##
|
||||||
|
|
||||||
|
# Collapse XOR trees where beneficial.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-xor_collapse YES"
|
||||||
|
|
||||||
|
# Share resources like adders or multipliers between logic blocks.
|
||||||
|
# Values: YES | NO | Force
|
||||||
|
- "-resource_sharing YES"
|
||||||
|
|
||||||
|
# Convert asynchronous resets to synchronous where possible.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-async_to_sync NO"
|
||||||
|
|
||||||
|
#### Xilinx Specific Options ####
|
||||||
|
|
||||||
|
## Optimization ##
|
||||||
|
|
||||||
|
# Enable removal of logically equivalent registers.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-equivalent_register_removal YES"
|
||||||
|
|
||||||
|
# Duplicate registers to reduce fanout or improve timing.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-register_duplication YES"
|
||||||
|
|
||||||
|
# Move registers across logic to balance timing.
|
||||||
|
# Values: Yes | No | Forward | Backward
|
||||||
|
- "-register_balancing No"
|
||||||
|
|
||||||
|
# Use clock enable signals where possible.
|
||||||
|
# Values: Auto | Yes | No
|
||||||
|
- "-use_clock_enable Yes"
|
||||||
|
|
||||||
|
# Use synchronous set (preset) signals when available.
|
||||||
|
# Values: Auto | Yes | No
|
||||||
|
- "-use_sync_set Yes"
|
||||||
|
|
||||||
|
# Use synchronous reset signals where possible.
|
||||||
|
# Values: Auto | Yes | No
|
||||||
|
- "-use_sync_reset Yes"
|
||||||
|
|
||||||
|
## I/O ##
|
||||||
|
|
||||||
|
# Insert IO buffers for top-level ports.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-iobuf YES"
|
||||||
|
|
||||||
|
# Placement strategy for IOB registers (Auto = let tools decide).
|
||||||
|
# Values: Auto | YES | NO
|
||||||
|
- "-iob Auto"
|
||||||
|
|
||||||
|
## Misc ##
|
||||||
|
|
||||||
|
# Maximum allowed fanout for a net.
|
||||||
|
# Values: integer (e.g., 500)
|
||||||
|
- "-max_fanout 500"
|
||||||
|
|
||||||
|
# Maximum number of BUFGs (global buffers) to use.
|
||||||
|
# Values: 0–32 (device-dependent)
|
||||||
|
- "-bufg 24"
|
||||||
|
|
||||||
|
# Enable logic packing into slices.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-slice_packing YES"
|
||||||
|
|
||||||
|
# Try to reduce the number of primitive instances used.
|
||||||
|
# Values: YES | NO
|
||||||
|
- "-optimize_primitives NO"
|
||||||
|
|
||||||
|
# Margin in percent beyond the target slice utilization.
|
||||||
|
# Values: 0–100
|
||||||
|
- "-slice_utilization_ratio_maxmargin 5"
|
||||||
30
src/hdlbuild/tools/xilinx_ise/bitgen.py
Normal file
30
src/hdlbuild/tools/xilinx_ise/bitgen.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import subprocess
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from typing import Optional
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||||
|
|
||||||
|
def run_bitgen(project: ProjectConfig):
    """Run Xilinx BITGEN (step 9/12): turn the routed NCD into a bitstream."""
    ncd_file = f"{project.name}.ncd"
    bit_file = f"{project.name}.bit"
    run_tool(
        project=project,
        tool_executable_name="bitgen",
        tool_option_attr="bitgen",
        mandatory_arguments=["-w", ncd_file, bit_file],
        step_number=9,
        total_steps=12,
    )
|
||||||
|
|
||||||
|
|
||||||
|
def copy_bitstream_file(project: ProjectConfig):
    """Copy the generated bitstream into the output directory (step 10/12)."""
    bit_name = f"{project.name}.bit"
    copy_file(
        project=project,
        source_filename=bit_name,
        destination_filename=bit_name,
        destination_dir=DIRECTORIES.copy_target,
        description="Bitstream File",
        step_number=10,
        total_steps=12,
    )
|
||||||
73
src/hdlbuild/tools/xilinx_ise/common.py
Normal file
73
src/hdlbuild/tools/xilinx_ise/common.py
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import shutil
|
||||||
|
import os
|
||||||
|
from typing import Optional, List
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleTask, ConsoleUtils
|
||||||
|
from rich.console import Console
|
||||||
|
|
||||||
|
def run_tool(
    project: ProjectConfig,
    tool_executable_name: str,
    mandatory_arguments: List[str],
    tool_option_attr: Optional[str] = None,
    working_dir: Optional[str] = None,
    silent: bool = False,
    step_number: Optional[int] = None,
    total_steps: Optional[int] = None
):
    """
    Invoke a Xilinx ISE command-line tool from the project's installation.

    The command line is assembled as:
    <tool> [common options] [tool-specific options] <mandatory arguments>

    Args:
        project (ProjectConfig): Loaded project configuration.
        tool_executable_name (str): Binary name under <xilinx_path>/bin/lin64.
        mandatory_arguments (List[str]): Arguments always appended last.
        tool_option_attr (Optional[str]): ToolOptions attribute with extra flags.
        working_dir (Optional[str]): Directory to run in (defaults to the build dir).
        silent (bool): Suppress tool output if True.
        step_number / total_steps: Progress information for the console task.

    Raises:
        FileNotFoundError: If the tool binary does not exist.
    """
    cwd = DIRECTORIES.build if working_dir is None else working_dir

    tool_executable = os.path.join(
        project.xilinx_path, "bin", "lin64", tool_executable_name
    )
    if not os.path.exists(tool_executable):
        raise FileNotFoundError(f"Executable nicht gefunden: {tool_executable}")

    cmd = [tool_executable]

    opts = project.tool_options
    if opts and opts.common:
        cmd.extend(opts.common)
    if tool_option_attr and opts:
        extra = getattr(opts, tool_option_attr, [])
        if extra:
            cmd.extend(extra)

    cmd.extend(mandatory_arguments)

    task = ConsoleTask("hdlbuild", tool_executable_name.upper(), step_number, total_steps)
    task.run_command(cmd, cwd=cwd, silent=silent)
|
||||||
|
|
||||||
|
|
||||||
|
def copy_file(
    project: ProjectConfig,
    source_filename: str,
    destination_filename: str,
    description: str = "Report",
    destination_dir: str = DIRECTORIES.report,
    step_number: Optional[int] = None,
    total_steps: Optional[int] = None
):
    """
    Copy an artefact from the build directory into another directory.

    Args:
        project (ProjectConfig): Loaded project configuration (not read here;
            kept for a uniform tool-step signature).
        source_filename (str): File name inside the build directory.
        destination_filename (str): Name of the copy in the target directory.
        description (str): Label for console output (e.g. "Synthesis Report").
        destination_dir (str): Target directory (defaults to the report dir).
        step_number / total_steps: Progress information for the console output.

    Raises:
        FileNotFoundError: If the source file does not exist.
    """
    src_path = os.path.join(DIRECTORIES.build, source_filename)
    dst_path = os.path.join(destination_dir, destination_filename)

    if not os.path.exists(src_path):
        raise FileNotFoundError(f"{description} nicht gefunden: {src_path}")

    os.makedirs(destination_dir, exist_ok=True)
    shutil.copyfile(src_path, dst_path)

    util = ConsoleUtils("hdlbuild", step_number, total_steps)
    util.print(f"{description} kopiert nach {dst_path}")
|
||||||
140
src/hdlbuild/tools/xilinx_ise/isim.py
Normal file
140
src/hdlbuild/tools/xilinx_ise/isim.py
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import os
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.dependencies.resolver import DependencyResolver
|
||||||
|
from hdlbuild.tools.xilinx_ise.common import run_tool
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleTask
|
||||||
|
from hdlbuild.utils.source_resolver import expand_all_sources, expand_testbenches
|
||||||
|
|
||||||
|
def generate_simulation_project_file(project: ProjectConfig, output_path: str, testbench_name: str):
    """
    Generate the ISim simulation project file (.prj).

    Lists all design sources (own and resolved dependencies), then the
    requested testbench, then glbl.v, which must come last.

    Args:
        project (ProjectConfig): The main project.
        output_path (str): Destination path for the .prj file.
        testbench_name (str): Testbench file name (e.g. "VGATimingGenerator_test_tb").
    """
    # Dependencies must already be on disk; offline_mode avoids network access.
    resolver = DependencyResolver(project, offline_mode=True)
    resolver.resolve_all()

    vhdl_sources, verilog_sources = expand_all_sources(project, resolver.resolved)

    # Paths in the .prj are relative to the build directory; hoist the prefix.
    prefix = DIRECTORIES.get_relative_prefix()

    with open(output_path, "w") as f:
        # Regular VHDL sources.
        for lib, file in vhdl_sources:
            f.write(f"vhdl {lib} \"{prefix}{file}\"\n")

        # Regular Verilog sources.
        for lib, file in verilog_sources:
            f.write(f"verilog {lib} \"{prefix}{file}\"\n")

        # Locate and append the requested testbench.
        testbench_file = find_testbench_file(project, testbench_name)
        normalized_tb = os.path.normpath(testbench_file)
        f.write(f"vhdl work \"{prefix}{normalized_tb}\"\n")

        # glbl.v always last. Fix: derive it from the configured ISE
        # installation instead of a hard-coded /opt/Xilinx/14.7 path, so
        # non-default installations work too (default value is unchanged).
        glbl_path = os.path.join(project.xilinx_path, "verilog", "src", "glbl.v")
        f.write(f"verilog work {glbl_path}\n")
|
||||||
|
|
||||||
|
|
||||||
|
def find_testbench_file(project: ProjectConfig, testbench_name: str) -> str:
    """
    Locate a testbench file in the project by its name (extension-less, case-insensitive).

    Args:
        project (ProjectConfig): Project definition.
        testbench_name (str): File name to look for (e.g. "VGATimingGenerator_test_tb").

    Returns:
        str: Full path of the testbench file.

    Raises:
        FileNotFoundError: If no matching file exists.
    """
    # Comparison key: file name without extension, lower-cased.
    wanted = os.path.splitext(testbench_name)[0].lower()

    for _, candidate_path in expand_testbenches(project):
        stem = os.path.splitext(os.path.basename(candidate_path))[0]
        if stem.lower() == wanted:
            return candidate_path

    raise FileNotFoundError(f"Testbench '{testbench_name}' wurde nicht gefunden.")
|
def build_testbench(project: ProjectConfig, testbench_name: str):
    """
    Build a single testbench with FUSE.

    Args:
        project (ProjectConfig): Main project configuration.
        testbench_name (str): Name of the testbench file, e.g. "VGATimingGenerator_test_tb.vhd"
    """
    # Top-level simulation unit / executable name (source name without extension).
    # (Removed an unused local that joined this with the build dir — it was never read.)
    testbench_unit = testbench_name.replace('.vhd', '').replace('.v', '')
    isim_exe_name = f"isim_{testbench_unit}"

    # 1. Generate the simulation project file
    generate_simulation_project_file(
        project=project,
        output_path=os.path.join(DIRECTORIES.build, f"{project.name}_sim.prj"),
        testbench_name=testbench_name
    )

    # 2. Run FUSE via `run_tool`
    mandatory_arguments = [
        "-prj", f"{project.name}_sim.prj",
        "-o", isim_exe_name,
        f"work.{testbench_unit}",
        "work.glbl"
    ]

    run_tool(
        project=project,
        tool_executable_name="fuse",
        tool_option_attr="fuse",
        mandatory_arguments=mandatory_arguments,
        working_dir=DIRECTORIES.build,
        silent=False
    )
|
def run_testbench(project: ProjectConfig, testbench_name: str):
    """
    Run a previously built testbench executable (ISim simulation).

    Args:
        project (ProjectConfig): Main project configuration.
        testbench_name (str): Name of the testbench file (e.g. "VGATimingGenerator_test_tb.vhd")

    Raises:
        RuntimeError: If the simulation exits with a non-zero status.
    """
    # Executable name derived from the testbench source file name.
    isim_exe_name = f"isim_{testbench_name.replace('.vhd', '').replace('.v', '')}"

    isim_cmd_file = os.path.join(DIRECTORIES.build, f"{isim_exe_name}.cmd")

    # 1. Create an (empty) TCL batch script for ISim.
    with open(isim_cmd_file, "w") as f:
        f.write("")

    cmd = [f"./{isim_exe_name}"]

    # BUGFIX: guard against project.tool_options being None — the original
    # getattr() call would raise AttributeError. This matches the guard used
    # in the XST script generation (`if project.tool_options and ...`).
    tool_opts = getattr(project.tool_options, "isim", []) if project.tool_options else []
    if tool_opts:
        cmd.extend(tool_opts)

    # 2. Assemble the command line
    cmd.extend([
        "-tclbatch",
        f"{isim_exe_name}.cmd"
    ])

    # 3. Execute
    task = ConsoleTask(prefix="hdlbuild", title=f"RUN {testbench_name}")
    result = task.run_command(cmd, cwd=DIRECTORIES.build)

    if result != 0:
        raise RuntimeError(f"Testbench {testbench_name} ist während der Simulation fehlgeschlagen!")
||||||
34
src/hdlbuild/tools/xilinx_ise/main.py
Normal file
34
src/hdlbuild/tools/xilinx_ise/main.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.tools.xilinx_ise.bitgen import copy_bitstream_file, run_bitgen
|
||||||
|
from hdlbuild.tools.xilinx_ise.map import copy_map_report, run_map
|
||||||
|
from hdlbuild.tools.xilinx_ise.ngdbuild import run_ngdbuild
|
||||||
|
from hdlbuild.tools.xilinx_ise.par import copy_par_report, copy_pinout_report, run_par
|
||||||
|
from hdlbuild.tools.xilinx_ise.trace import copy_trace_report, run_trace
|
||||||
|
from hdlbuild.tools.xilinx_ise.xst import copy_synthesis_report, generate_xst_project_file, generate_xst_script_file, run_xst
|
||||||
|
|
||||||
|
|
||||||
|
def xilinx_ise_synth(project: ProjectConfig):
    """Run XST synthesis: emit project/script files, run XST, copy the report."""
    build_dir = DIRECTORIES.build
    generate_xst_project_file(project, f"{build_dir}/{project.name}.prj")
    generate_xst_script_file(project, f"{build_dir}/{project.name}.scr")
    run_xst(project)
    copy_synthesis_report(project)
|
def xilinx_ise_all(project: ProjectConfig):
    """Run the full ISE flow: synth → ngdbuild → map → par → bitgen → trace."""
    # Synthesis
    xilinx_ise_synth(project)
    # Translate
    run_ngdbuild(project)
    # Map
    run_map(project)
    copy_map_report(project)
    # Place & route
    run_par(project)
    copy_par_report(project)
    copy_pinout_report(project)
    # Bitstream
    run_bitgen(project)
    copy_bitstream_file(project)
    # Static timing
    run_trace(project)
    copy_trace_report(project)
||||||
30
src/hdlbuild/tools/xilinx_ise/map.py
Normal file
30
src/hdlbuild/tools/xilinx_ise/map.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import subprocess
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from typing import Optional
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||||
|
|
||||||
|
def run_map(project: ProjectConfig):
    """Run the ISE 'map' step on the translated NGD netlist (step 4/12)."""
    map_args = [
        "-p", project.target_device,
        "-w",
        f"{project.name}.ngd",
        "-o", f"{project.name}.map.ncd",
        f"{project.name}.pcf",
    ]
    run_tool(
        project=project,
        tool_executable_name="map",
        tool_option_attr="map",
        mandatory_arguments=map_args,
        step_number=4,
        total_steps=12,
    )
|
def copy_map_report(project: ProjectConfig):
    """Copy the generated map report (.mrp) to its published name (step 5/12)."""
    report_src = f"{project.name}.map.mrp"
    report_dst = f"{project.name}.MapReport"
    copy_file(
        project=project,
        source_filename=report_src,
        destination_filename=report_dst,
        description="Map Report",
        step_number=5,
        total_steps=12,
    )
||||||
19
src/hdlbuild/tools/xilinx_ise/ngdbuild.py
Normal file
19
src/hdlbuild/tools/xilinx_ise/ngdbuild.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import subprocess
|
||||||
|
import os
|
||||||
|
from typing import Optional
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.tools.xilinx_ise.common import run_tool
|
||||||
|
|
||||||
|
def run_ngdbuild(project: ProjectConfig):
    """Run the ISE 'ngdbuild' (translate) step with constraints applied (step 3/12)."""
    ngd_args = [
        "-p", project.target_device,
        "-uc", f"{DIRECTORIES.get_relative_prefix()}{project.constraints}",
        f"{project.name}.ngc",
        f"{project.name}.ngd",
    ]
    run_tool(
        project=project,
        tool_executable_name="ngdbuild",
        tool_option_attr="ngdbuild",
        mandatory_arguments=ngd_args,
        step_number=3,
        total_steps=12,
    )
||||||
38
src/hdlbuild/tools/xilinx_ise/par.py
Normal file
38
src/hdlbuild/tools/xilinx_ise/par.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
import subprocess
|
||||||
|
import shutil
|
||||||
|
import os
|
||||||
|
from typing import Optional
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||||
|
|
||||||
|
def run_par(project: ProjectConfig):
    """Run the ISE place & route step on the mapped design (step 6/12)."""
    par_args = [
        "-w",
        f"{project.name}.map.ncd",
        f"{project.name}.ncd",
        f"{project.name}.pcf",
    ]
    run_tool(
        project=project,
        tool_executable_name="par",
        tool_option_attr="par",
        mandatory_arguments=par_args,
        step_number=6,
        total_steps=12,
    )
|
def copy_par_report(project: ProjectConfig):
    """Copy the place & route report to its published name (step 7/12)."""
    src_name = f"{project.name}.par"
    dst_name = f"{project.name}.PlaceRouteReport"
    copy_file(
        project=project,
        source_filename=src_name,
        destination_filename=dst_name,
        description="Place & Route Report",
        step_number=7,
        total_steps=12,
    )
|
def copy_pinout_report(project: ProjectConfig):
    """Copy the pad/pinout report to its published name (step 8/12)."""
    src_name = f"{project.name}_pad.txt"
    dst_name = f"{project.name}.PinoutReport"
    copy_file(
        project=project,
        source_filename=src_name,
        destination_filename=dst_name,
        description="Pinout Report",
        step_number=8,
        total_steps=12,
    )
||||||
27
src/hdlbuild/tools/xilinx_ise/trace.py
Normal file
27
src/hdlbuild/tools/xilinx_ise/trace.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import subprocess
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from typing import Optional
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||||
|
|
||||||
|
def run_trace(project: ProjectConfig):
    """Run the ISE 'trce' static timing analysis step (step 11/12)."""
    trace_args = [
        f"{project.name}.ncd",
        f"{project.name}.pcf",
    ]
    run_tool(
        project=project,
        tool_executable_name="trce",
        tool_option_attr="trace",
        mandatory_arguments=trace_args,
        step_number=11,
        total_steps=12,
    )
|
def copy_trace_report(project: ProjectConfig):
    """Copy the timing report (.twr) to its published name (step 12/12)."""
    src_name = f"{project.name}.twr"
    dst_name = f"{project.name}.TimingReport"
    copy_file(
        project=project,
        source_filename=src_name,
        destination_filename=dst_name,
        description="Timing Report",
        step_number=12,
        total_steps=12,
    )
||||||
67
src/hdlbuild/tools/xilinx_ise/xst.py
Normal file
67
src/hdlbuild/tools/xilinx_ise/xst.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
from typing import Optional
|
||||||
|
from hdlbuild.dependencies.resolver import DependencyResolver
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.tools.xilinx_ise.common import copy_file, run_tool
|
||||||
|
from hdlbuild.utils.source_resolver import expand_all_sources
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
import subprocess
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
def generate_xst_project_file(project: ProjectConfig, output_path: str):
    """
    Generate the XST .prj file listing all VHDL and Verilog sources
    of the project and its resolved dependencies.

    Args:
        project (ProjectConfig): The main project.
        output_path (str): Target path of the .prj file.
    """
    resolver = DependencyResolver(project, offline_mode=True)
    resolver.resolve_all()
    vhdl_sources, verilog_sources = expand_all_sources(project, resolver.resolved)

    # Hoisted out of the loops: the relative prefix is loop-invariant.
    prefix = DIRECTORIES.get_relative_prefix()

    with open(output_path, "w") as f:
        for lib, file in vhdl_sources:
            f.write(f"vhdl {lib} \"{prefix}{file}\"\n")

        for lib, file in verilog_sources:
            f.write(f"verilog {lib} \"{prefix}{file}\"\n")

    # Removed a dead `for dep in project.dependencies: pass` loop — dependency
    # sources are already included via expand_all_sources(resolver.resolved).
|
def generate_xst_script_file(project: ProjectConfig, output_path: str):
    """
    Generate the XST .scr script file carrying the synthesis options.

    Option order matters to XST: fixed options first, then user options
    from tool_options.xst, then top module / output format / device.

    Args:
        project (ProjectConfig): The main project.
        output_path (str): Target path of the .scr file.
    """
    with open(output_path, "w") as f:
        # Removed pointless f-prefixes on placeholder-free strings.
        f.write("run ")
        f.write(f"-ifn {project.name}.prj ")
        f.write(f"-ofn {project.name}.ngc ")
        f.write("-ifmt mixed ")

        if project.tool_options and project.tool_options.xst:
            for opt in project.tool_options.xst:
                f.write(f"{opt} ")

        f.write(f"-top {project.topmodule} ")
        f.write("-ofmt NGC ")
        f.write(f"-p {project.target_device} ")
|
def run_xst(project: ProjectConfig):
    """Run XST synthesis using the previously generated .scr script (step 1/12)."""
    xst_args = ["-ifn", f"{project.name}.scr"]
    run_tool(
        project=project,
        tool_executable_name="xst",
        mandatory_arguments=xst_args,
        step_number=1,
        total_steps=12,
    )
|
def copy_synthesis_report(project: ProjectConfig):
    """Copy the synthesis report (.srp) to its published name (step 2/12)."""
    src_name = f"{project.name}.srp"
    dst_name = f"{project.name}.SynthesisReport"
    copy_file(
        project=project,
        source_filename=src_name,
        destination_filename=dst_name,
        description="Synthesebericht",
        step_number=2,
        total_steps=12,
    )
||||||
176
src/hdlbuild/utils/console_utils.py
Normal file
176
src/hdlbuild/utils/console_utils.py
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import subprocess
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.live import Live
|
||||||
|
from rich.text import Text
|
||||||
|
from rich.markup import render
|
||||||
|
|
||||||
|
class ConsoleTask:
    """Run a shell command while showing a live spinner plus a rolling tail of
    its output (via ``rich``), then print a final one-line status.

    On failure the complete captured output is echoed in red and the process
    exits with status 1 (see run_command).
    """

    def __init__(self, prefix:str, title: str, step_number: Optional[int] = None, total_steps: Optional[int] = None, max_log_lines: int = 10):
        # Grey label shown in front of every line, e.g. "hdlbuild".
        self.prefix = prefix
        # Human-readable task title shown next to the spinner.
        self.title = title
        # Optional "Step X/Y" indicator; only rendered when both are set.
        self.step_number = step_number
        self.total_steps = total_steps
        # How many of the most recent output lines stay visible under the spinner.
        self.max_log_lines = max_log_lines
        self.spinner_cycle = ['|', '/', '-', '\\']
        # Signals the spinner thread to stop redrawing.
        self.stop_event = threading.Event()
        self.spinner_thread: Optional[threading.Thread] = None
        # Bounded rolling window of recent output lines (display only).
        self.output_lines: List[str] = []
        # Complete output history, used for the red error dump on failure.
        self.all_lines: List[str] = []
        # Serializes redraws and log appends between caller and spinner thread.
        self._stdout_lock = threading.Lock()
        self.console = Console()
        self.live: Optional[Live] = None
        self.spinner_idx = 0

    def start_spinner(self):
        """Start the rich Live display and the background spinner thread."""
        self.live = Live(console=self.console, refresh_per_second=30, transient=True)
        self.live.start()
        self.spinner_thread = threading.Thread(target=self._spinner_task, daemon=True)
        self.spinner_thread.start()

    def _spinner_task(self):
        """Background loop: redraw the spinner ~10x/s until stop_event is set."""
        while not self.stop_event.is_set():
            with self._stdout_lock:
                self._redraw_spinner()
            self.spinner_idx += 1
            time.sleep(0.1)

    def _render_content(self) -> Text:
        """Build the renderable: spinner/header line plus the visible log tail."""
        visible_lines = self.output_lines[-self.max_log_lines:]

        # "\[" keeps the literal bracket from being parsed as rich markup.
        prefix_text = f"[grey50]\[{self.prefix}][/grey50]" if self.prefix else ""
        step_text = f"[bold blue]Step {self.step_number}/{self.total_steps}[/bold blue]" if self.step_number and self.total_steps else ""
        title_text = f"[bold]{self.title}[/bold]" if self.title else ""

        spinner_markup = f"{prefix_text} {step_text} {title_text} {self.spinner_cycle[self.spinner_idx % len(self.spinner_cycle)]}"

        spinner_text = Text.from_markup(spinner_markup)
        # Log tail is rendered as plain text (no markup interpretation).
        log_text = Text("\n".join(visible_lines))

        full_text = spinner_text + Text("\n") + log_text

        return full_text

    def _redraw_spinner(self):
        """Push the current render to the Live display (if active)."""
        if self.live:
            self.live.update(self._render_content())

    def log(self, message: str):
        """Append one output line to the history and refresh the display.

        Thread-safe: takes the shared lock also held by the spinner thread.
        """
        with self._stdout_lock:
            self.all_lines.append(message)
            self.output_lines.append(message)
            # Trim the display window; all_lines keeps the full history.
            if len(self.output_lines) > self.max_log_lines:
                self.output_lines = self.output_lines[-self.max_log_lines:]

            if self.live:
                self.live.update(self._render_content())


    def run_command(self, cmd: List[str], cwd: Optional[str] = None, silent: bool = False) -> int:
        """Run *cmd* under the spinner, streaming its output into the log.

        Args:
            cmd: Command argv list.
            cwd: Optional working directory.
            silent: If True, discard all output instead of streaming it.

        Returns:
            int: 0 on success. On failure this method does NOT return — it
            prints the captured output in red and calls sys.exit(1).
        """
        success = False
        start_time = time.time()

        self.start_spinner()

        try:
            if silent:
                subprocess.run(cmd, cwd=cwd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                success = True
            else:
                # stderr is merged into stdout so the log shows one stream.
                process = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
                if process.stdout is None:
                    raise ValueError("Failed to capture stdout")

                # Stream line-by-line until the process exits and the pipe drains.
                while True:
                    line = process.stdout.readline()
                    if not line and process.poll() is not None:
                        break
                    if line:
                        self.log(line.rstrip())

                success = (process.returncode == 0)

        finally:
            # Always stop the spinner thread, even if Popen/run raised.
            self.stop_event.set()
            if self.spinner_thread:
                self.spinner_thread.join()

            duration = time.time() - start_time

            with self._stdout_lock:
                self._finalize_output(success, duration)

            if not success:
                # Pretty error output, then controlled shutdown.
                self.console.print("\n[bold red]❌ Fehler beim Ausführen des Kommandos:[/bold red]")
                for line in self.all_lines:
                    self.console.print(f"[red]{line}[/red]")
                sys.exit(1)  # hard but clean termination of the program

        return 0


    def _finalize_output(self, success: bool, duration: float):
        """Stop the Live display and print the final status line."""
        if self.live:
            self.live.stop()

        prefix_text = f"[grey50]\[{self.prefix}][/grey50]" if self.prefix else ""
        status_symbol = "[green]✅[/green]" if success else "[red]❌[/red]"
        step_text = f"[bold blue]Step {self.step_number}/{self.total_steps}[/bold blue]" if self.step_number and self.total_steps else ""
        status_title = f"[bold green]{self.title}[/bold green]" if success else f"[bold red]{self.title}[/bold red]"
        final_line = f"{prefix_text} {step_text} {status_title} {status_symbol} [bold green]({duration:.1f}s[/bold green])"

        # Final full output
        self.console.print(final_line)
|
class ConsoleUtils:
    """Small helper for prefixed console output, optionally via a transient
    rich Live display that is cleared when stop_live() is called."""

    def __init__(
        self,
        prefix: str = "hdlbuild",
        step_number: Optional[int] = None,
        total_steps: Optional[int] = None,
        live: bool = False
    ):
        # Grey label prepended to every printed message.
        self.prefix = prefix
        # Optional "Step X/Y" indicator; only rendered when both are set.
        self.step_number = step_number
        self.total_steps = total_steps
        self.console = Console()
        # When True, messages accumulate in a transient Live view.
        self.live_mode = live
        self.live: Optional[Live] = None
        # Messages shown while the Live view is active.
        self.messages: List[str] = []

    def start_live(self):
        """Start live mode (no-op unless constructed with live=True)."""
        if self.live_mode and self.live is None:
            self.live = Live(console=self.console, refresh_per_second=10, transient=True)
            self.live.start()

    def print(self, message: str):
        """Print *message* with prefix/step decoration, to Live view or console."""
        # "\[" keeps the literal bracket from being parsed as rich markup.
        prefix = f"[grey50]\[{self.prefix}][/grey50]" if self.prefix else ""
        step_info = f"[bold blue]Step {self.step_number}/{self.total_steps}[/bold blue]" if self.step_number and self.total_steps else ""
        full_message = f"{prefix} {step_info} {message}"

        if self.live_mode and self.live:
            # Re-render the whole accumulated message list into the Live view.
            self.messages.append(full_message)
            rendered_lines = [Text.from_markup(line) for line in self.messages]
            combined = Text()
            for line in rendered_lines:
                combined.append(line)
                combined.append("\n")
            self.live.update(combined)
        else:
            self.console.print(full_message)

    def stop_live(self, final_message: Optional[str] = None):
        """Stop live mode, discard the transient output, and optionally print a closing message."""
        if self.live_mode and self.live:
            self.live.stop()
            self.live = None
            self.messages.clear()  # discard the accumulated messages

        if final_message:
            self.console.print(final_message)
57
src/hdlbuild/utils/directory_manager.py
Normal file
57
src/hdlbuild/utils/directory_manager.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from hdlbuild.models.config import DIRECTORIES
|
||||||
|
from hdlbuild.utils.console_utils import ConsoleUtils
|
||||||
|
|
||||||
|
def ensure_directories_exist(silent: bool = False):
    """
    Create every directory defined in the configuration if it does not exist yet.
    """
    reporter = ConsoleUtils("hdlbuild") if not silent else None

    for name, path in DIRECTORIES.dict().items():
        if os.path.exists(path):
            if reporter:
                reporter.print(f"[hdlbuild] Verzeichnis vorhanden: {path}")
        else:
            os.makedirs(path, exist_ok=True)
            if reporter:
                reporter.print(f"Verzeichnis erstellt: {path}")
|
def clear_directories(silent: bool = False):
    """
    Delete every directory defined in the configuration if it exists.
    """
    reporter = ConsoleUtils("hdlbuild") if not silent else None

    for name, path in DIRECTORIES.dict().items():
        if not os.path.exists(path):
            if reporter:
                reporter.print(f"Verzeichnis nicht vorhanden, übersprungen: {path}")
            continue
        if reporter:
            reporter.print(f"Lösche Verzeichnis: {path}")
        shutil.rmtree(path)
|
def clear_build_directories(silent: bool = False):
    """
    Delete all configured directories EXCEPT the dependency directory,
    so that downloaded dependencies survive a build clean.

    (Docstring fixed: it was a copy of clear_directories and did not
    mention that the "dependency" entry is skipped.)
    """
    console_utils = None
    if not silent:
        console_utils = ConsoleUtils("hdlbuild")

    for name, path in DIRECTORIES.dict().items():
        # Keep resolved dependencies; only build artifacts are cleaned.
        if name == "dependency":
            continue
        if os.path.exists(path):
            if not silent and console_utils:
                console_utils.print(f"Lösche Verzeichnis: {path}")
            shutil.rmtree(path)
        else:
            if not silent and console_utils:
                console_utils.print(f"Verzeichnis nicht vorhanden, übersprungen: {path}")
||||||
16
src/hdlbuild/utils/project_loader.py
Normal file
16
src/hdlbuild/utils/project_loader.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import yaml
|
||||||
|
from hdlbuild.models.project import ProjectConfig
|
||||||
|
|
||||||
|
def load_project_config(path: str = "project.yml") -> ProjectConfig:
    """
    Load the project configuration from a YAML file and return a typed
    ProjectConfig object.

    Args:
        path (str): Path to the project.yml file (default: "project.yml")

    Returns:
        ProjectConfig: Parsed, typed project configuration object.
    """
    with open(path, "r") as fh:
        data = yaml.safe_load(fh)
    return ProjectConfig(**data)
||||||
100
src/hdlbuild/utils/source_resolver.py
Normal file
100
src/hdlbuild/utils/source_resolver.py
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
# src/hdlbuild/utils/source_resolver.py
|
||||||
|
|
||||||
|
import glob
|
||||||
|
import os
|
||||||
|
from typing import List, Tuple
|
||||||
|
from hdlbuild.models.project import SourceFile, ProjectConfig
|
||||||
|
from hdlbuild.models.dependency import ResolvedDependency
|
||||||
|
|
||||||
|
def _expand_project_sources(project: ProjectConfig, project_root: str) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
|
||||||
|
"""
|
||||||
|
Expandiert die Quellen eines einzelnen Projekts, getrennt nach VHDL und Verilog.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project (ProjectConfig): Das Projekt, dessen Quellen expandiert werden sollen.
|
||||||
|
project_root (str): Basisverzeichnis, von dem aus die Pfade aufgelöst werden.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple: (List of (library, filepath) für VHDL, List of (library, filepath) für Verilog)
|
||||||
|
"""
|
||||||
|
vhdl_expanded = []
|
||||||
|
verilog_expanded = []
|
||||||
|
|
||||||
|
# VHDL-Sources
|
||||||
|
for source in project.sources.vhdl:
|
||||||
|
full_pattern = os.path.join(project_root, source.path)
|
||||||
|
matched_files = glob.glob(full_pattern, recursive=True)
|
||||||
|
for file in matched_files:
|
||||||
|
normalized_path = os.path.normpath(file)
|
||||||
|
vhdl_expanded.append((source.library, normalized_path))
|
||||||
|
|
||||||
|
# Verilog-Sources
|
||||||
|
for source in project.sources.verilog:
|
||||||
|
full_pattern = os.path.join(project_root, source.path)
|
||||||
|
matched_files = glob.glob(full_pattern, recursive=True)
|
||||||
|
for file in matched_files:
|
||||||
|
normalized_path = os.path.normpath(file)
|
||||||
|
verilog_expanded.append((source.library, normalized_path))
|
||||||
|
|
||||||
|
return vhdl_expanded, verilog_expanded
|
||||||
|
|
||||||
|
def expand_all_sources(root_project: ProjectConfig, resolved_dependencies: List[ResolvedDependency]) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
    """
    Expand all sources of the root project and every dependency,
    split into VHDL and Verilog.

    Args:
        root_project (ProjectConfig): The main project
        resolved_dependencies (List[ResolvedDependency]): All recursively resolved dependencies

    Returns:
        Tuple:
            - List of (library, filepath) for VHDL
            - List of (library, filepath) for Verilog
    """
    all_vhdl_sources: List[Tuple[str, str]] = []
    all_verilog_sources: List[Tuple[str, str]] = []

    # Root project first, then each dependency rooted at its local checkout.
    expansions = [(root_project, ".")]
    expansions.extend((dep.project, dep.local_path) for dep in resolved_dependencies)

    for proj, root in expansions:
        vhdl_part, verilog_part = _expand_project_sources(proj, root)
        all_vhdl_sources.extend(vhdl_part)
        all_verilog_sources.extend(verilog_part)

    return all_vhdl_sources, all_verilog_sources
|
def expand_testbenches(project: "ProjectConfig") -> List[Tuple[str, str]]:
    """
    Expand only the testbench globs (VHDL and Verilog) of the main project.

    Args:
        project: The main project.

    Returns:
        List of (library, filepath) tuples.
    """
    expanded: List[Tuple[str, str]] = []

    if not project.testbenches:
        return expanded

    # VHDL and Verilog testbenches are expanded identically, so the original
    # duplicated loop is merged into one pass over both lists.
    for source in list(project.testbenches.vhdl) + list(project.testbenches.verilog):
        full_pattern = os.path.join(".", source.path)
        for file in glob.glob(full_pattern, recursive=True):
            expanded.append((source.library, os.path.normpath(file)))

    return expanded
||||||
Reference in New Issue
Block a user