diff --git a/CLAUDE.md b/CLAUDE.md index 1df34ba6..c8ac143a 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -47,7 +47,7 @@ chipflow-lib is a Python library for working with the ChipFlow platform, enablin - `chipflow.toml`: User project configuration file (must exist in `CHIPFLOW_ROOT`) - `config_models.py`: Pydantic models defining configuration schema - `config.py`: Configuration file parsing logic - - Key configuration sections: `[chipflow]`, `[chipflow.silicon]`, `[chipflow.simulation]`, `[chipflow.software]`, `[chipflow.test]` + - Key configuration sections: `[chipflow]`, `[chipflow.silicon]`, `[chipflow.silicon.macros]`, `[chipflow.simulation]`, `[chipflow.software]`, `[chipflow.test]` 3. **Platform Abstraction** (`platforms/`): - `SiliconPlatform`: Targets ASIC fabrication (supports SKY130, GF180, GF130BCD, IHP_SG13G2, HELVELLYN2) @@ -77,6 +77,11 @@ chipflow-lib is a Python library for working with the ChipFlow platform, enablin - `IOModel` configures electrical characteristics (drive mode, trip point, inversion) - Annotations attach metadata to Amaranth components for automatic pin allocation +7. **RTL Wrapping** (`chipflow/rtl/`): + - `load_wrapper_from_toml()` wraps external Verilog/SystemVerilog/SpinalHDL modules as Amaranth components from a TOML description + - `load_blackbox_wrapper()` wraps NDA / third-party hard macros declared in `[chipflow.silicon.macros]`, consuming a `*.blackbox.json` produced by the sibling [macrostrip](https://github.com/ChipFlow/macrostrip) tool + - At submit time, `SiliconPlatform._macros` is packed under a `macros/` directory inside the submission `bundle.zip` (manifest.json + per-macro LEF / Liberty / GDS / stub), alongside the RTLIL and pins.lock + ### Key Design Patterns 1. 
**Component Discovery via Configuration**: diff --git a/chipflow/config/models.py b/chipflow/config/models.py index dea7333e..d89f6622 100644 --- a/chipflow/config/models.py +++ b/chipflow/config/models.py @@ -47,6 +47,18 @@ class VoltageRange(SelectiveSerializationModel): typical: Annotated[Optional[Voltage], OmitIfNone()] = None +class MacroDecl(BaseModel): + """Declaration of an NDA / third-party hard macro packaged by + `macrostrip` (or a conformant tool). + + Minimum: a path to a ``*.blackbox.json`` describing the macro. The + JSON itself carries the companion artifact paths (LEF, Liberty, + frame-view GDS, Verilog stub). Paths inside the JSON are interpreted + relative to the JSON's own directory. + """ + blackbox: Path + + class SiliconConfig(BaseModel): """Configuration for silicon in chipflow.toml.""" process: 'Process' @@ -54,6 +66,7 @@ class SiliconConfig(BaseModel): power: Dict[str, Voltage] = {} debug: Optional[Dict[str, bool]] = None # This is still kept around to allow forcing pad locations. + macros: Dict[str, MacroDecl] = {} class SimulationConfig(BaseModel): """Configuration for simulation settings.""" diff --git a/chipflow/platform/silicon.py b/chipflow/platform/silicon.py index 67bbad00..ec24dd75 100644 --- a/chipflow/platform/silicon.py +++ b/chipflow/platform/silicon.py @@ -405,6 +405,7 @@ def __init__(self, config: 'Config'): self._config = config self._ports = {} self._files = {} + self._macros: dict[str, dict] = {} self._pinlock = None @property @@ -476,6 +477,67 @@ def add_file(self, filename, content): assert isinstance(content, bytes) self._files[str(filename)] = content + def add_macro(self, logical_name: str) -> dict: + """Register an NDA / third-party hard macro with the platform. 
+ + Looks up ``logical_name`` in ``[chipflow.silicon.macros]``, loads + the referenced ``*.blackbox.json``, resolves its companion file + paths relative to the JSON directory, and stores the resulting + entry in ``self._macros`` for later bundling by the submit step. + + Returns the stored entry. Idempotent when called repeatedly with + the same logical name (the same entry is returned). + """ + assert self._config.chipflow.silicon is not None + macros_cfg = self._config.chipflow.silicon.macros + if logical_name not in macros_cfg: + raise ChipFlowError( + f"Macro '{logical_name}' is not declared in " + f"[chipflow.silicon.macros]. Known macros: " + f"{sorted(macros_cfg.keys()) or '(none)'}" + ) + + if logical_name in self._macros: + return self._macros[logical_name] + + from ..utils import ensure_chipflow_root + root = ensure_chipflow_root() + bb_path = Path(macros_cfg[logical_name].blackbox) + if not bb_path.is_absolute(): + bb_path = (root / bb_path).resolve() + if not bb_path.exists(): + raise ChipFlowError( + f"Macro '{logical_name}': blackbox JSON not found at {bb_path}" + ) + + import json as _json + bb = _json.loads(bb_path.read_text()) + if bb.get("version") != "1": + raise ChipFlowError( + f"Macro '{logical_name}': unsupported blackbox JSON version " + f"{bb.get('version')!r} (expected '1')" + ) + + files: dict[str, Path] = {} + for key, rel in (bb.get("files") or {}).items(): + resolved = (bb_path.parent / rel).resolve() + if not resolved.exists(): + raise ChipFlowError( + f"Macro '{logical_name}': companion file '{key}' " + f"referenced by {bb_path} not found at {resolved}" + ) + files[key] = resolved + + entry = { + "logical_name": logical_name, + "name": bb["name"], + "blackbox_json": bb_path, + "blackbox": bb, + "files": files, + } + self._macros[logical_name] = entry + return entry + def _check_clock_domains(self, fragment, sync_domain=None): for clock_domain in fragment.domains.values(): if clock_domain.name != "sync" or (sync_domain is not None and 
diff --git a/chipflow/platform/silicon_step.py b/chipflow/platform/silicon_step.py index 6bd15e40..fc62f860 100644 --- a/chipflow/platform/silicon_step.py +++ b/chipflow/platform/silicon_step.py @@ -33,15 +33,21 @@ def _build_bundle_zip( - rtlil_path, config: str, project_name: str, process: str, package: str + rtlil_path, + config: str, + project_name: str, + process: str, + package: str, + macros: dict | None = None, ) -> bytes: """Pack the submission into a single zip with a manifest. Layout:: manifest.json - # e.g. "top.il", taken from rtlil_path - pins.lock # the pinlock JSON + # e.g. "top.il" + pins.lock # the pinlock JSON + macros// ... # only when macros are registered ``project_name`` / ``process`` / ``package`` come from chipflow.toml (``[chipflow] project_name``, ``[chipflow.silicon] process``, @@ -49,20 +55,22 @@ def _build_bundle_zip( backend's working directory naming and PDK / package selection) use these to identify and route the design without re-parsing the pinlock. - The manifest is the only contract: consumers locate the design and - pinlock payloads via ``manifest["design_file"]`` and - ``manifest["pins_lock_file"]``. Keys naming a file inside the - archive carry a ``_file`` suffix so they're distinguishable from - plain value keys (``version``, ``project``, ``process``, - ``package``); the value is a zip-relative path. ``design_file`` is - named in terms of role rather than format so the same key can carry - rtlil today, or another intermediate (Verilog, FIRRTL) tomorrow, - without renaming. Future additions (e.g. macro folders) extend the - manifest without changing this function's signature on the wire. + ``macros`` is the dict produced by ``SiliconPlatform._macros`` (logical + name → entry with ``name``, ``blackbox_json``, ``files``). 
Each macro's + companion files are written under ``macros//`` and surface + in the manifest as ``manifest["macros"][]`` with + ``_file``-suffixed paths so the backend's recursive + ``_bundle_files_from_manifest`` extractor picks them up. + + The manifest is the only contract: consumers locate every payload via + ``_file``-suffixed keys at any nesting depth. ``design_file`` is named + in terms of role rather than format so the same key can carry rtlil + today, or another intermediate (Verilog, FIRRTL) tomorrow, without + renaming. """ design_arc = Path(rtlil_path).name pins_lock_arc = "pins.lock" - manifest = { + manifest: dict = { "version": "1", "project": project_name, "process": process, @@ -70,6 +78,28 @@ def _build_bundle_zip( "design_file": design_arc, "pins_lock_file": pins_lock_arc, } + + file_entries: list[tuple[str, Path]] = [] # (archive_path, real_path) + if macros: + manifest_macros: dict = {} + for logical_name, entry in macros.items(): + subdir = f"macros/{logical_name}" + macro_dict: dict = {"name": entry["name"]} + for role, src in entry["files"].items(): + arc = f"{subdir}/{Path(src).name}" + # Suffix `_file` so the backend's recursive extractor finds it. + macro_dict[f"{role}_file"] = arc + file_entries.append((arc, Path(src))) + + # Emit the blackbox JSON alongside its companions for completeness. 
+ bb_arc = f"{subdir}/{Path(entry['blackbox_json']).name}" + macro_dict.setdefault("blackbox_json_file", bb_arc) + file_entries.append((bb_arc, Path(entry["blackbox_json"]))) + + manifest_macros[logical_name] = macro_dict + + manifest["macros"] = manifest_macros + manifest_bytes = (json.dumps(manifest, indent=2) + "\n").encode("utf-8") buf = io.BytesIO() @@ -77,6 +107,8 @@ def _build_bundle_zip( zf.writestr("manifest.json", manifest_bytes) zf.writestr(pins_lock_arc, config) zf.write(str(rtlil_path), arcname=design_arc) + for arc, src in file_entries: + zf.write(str(src), arcname=arc) return buf.getvalue() @@ -235,7 +267,8 @@ def submit(self, rtlil_path, args): rtlil_path, config, self.config.chipflow.project_name, self.config.chipflow.silicon.process.value, - self.config.chipflow.silicon.package) + self.config.chipflow.silicon.package, + self.platform._macros) if args.dry_run: sp.succeed(f"✅ Design `{data['projectId']}:{data['name']}` ready for submission to ChipFlow cloud!") @@ -243,7 +276,7 @@ def submit(self, rtlil_path, args): logger.debug(f"files['config']=\n{config}") bundle_path = Path(rtlil_path).parent / "bundle.zip" bundle_path.write_bytes(bundle_bytes) - sp.info(f"Compiled submission written to `{bundle_path}` (manifest.json + rtlil + pins.lock)") + sp.info(f"Compiled submission written to `{bundle_path}` (manifest.json + rtlil + pins.lock + any macros)") return def network_err(e): diff --git a/chipflow/rtl/__init__.py b/chipflow/rtl/__init__.py index 0f181bad..db5c887e 100644 --- a/chipflow/rtl/__init__.py +++ b/chipflow/rtl/__init__.py @@ -21,6 +21,10 @@ sim.step() """ +from chipflow.rtl.blackbox import ( + BlackboxWrapper, + load_blackbox_wrapper, +) from chipflow.rtl.wrapper import ( RTLWrapper, VerilogWrapper, # Alias for backwards compatibility @@ -35,7 +39,9 @@ __all__ = [ "RTLWrapper", "VerilogWrapper", + "BlackboxWrapper", "load_wrapper_from_toml", + "load_blackbox_wrapper", "_generate_auto_map", "_infer_auto_map", "_parse_verilog_ports", diff 
--git a/chipflow/rtl/blackbox.py b/chipflow/rtl/blackbox.py new file mode 100644 index 00000000..79fdd779 --- /dev/null +++ b/chipflow/rtl/blackbox.py @@ -0,0 +1,229 @@ +# SPDX-License-Identifier: BSD-2-Clause +"""Blackbox macro wrapper. + +``load_blackbox_wrapper`` reads a ``*.blackbox.json`` produced by +`macrostrip `_ (or any conformant +tool) and returns an :class:`RTLWrapper` subclass that: + +- synthesizes an :class:`ExternalWrapConfig` from the JSON's pin list, +- uses the companion Verilog stub so Yosys sees the macro's port + signature during synthesis, +- registers the macro with the platform at elaborate time so the submit + step can bundle its LEF / Liberty / frame-view GDS into the upload. + +The macro is declared in ``chipflow.toml``:: + + [chipflow.silicon.macros.sram_64x64] + blackbox = "vendor/ihp/sram_64x64.blackbox.json" + +and instantiated from Python by logical name:: + + sram = load_blackbox_wrapper("sram_64x64", + clocks={"sys": "CLK"}, + resets={"sys": "RST_N"}) +""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Dict, Optional + +from chipflow import ChipFlowError + +from .wrapper import ExternalWrapConfig, Files, Port, RTLWrapper + + +__all__ = ["load_blackbox_wrapper", "BlackboxWrapper"] + + +def _build_port_configs( + macro_name: str, + pins: list[dict], + clock_pins: set[str], + reset_pins: set[str], +) -> Dict[str, Port]: + """Translate blackbox JSON pins into RTLWrapper ``Port`` entries. + + Power/ground pins are skipped (handled at the platform/PDN level, + not wired from Amaranth). Clock and reset pins are skipped here + because they're wired via ``config.clocks`` / ``config.resets``. + ``inout`` signal pins (non power/ground) are rejected — pin-style + bidirectional wrapping isn't automated yet. 
+ """ + ports: Dict[str, Port] = {} + for pin in pins: + name = pin["name"] + role = pin.get("role", "signal") + direction = pin["direction"] + width = int(pin["width"]) + + if role in ("power", "ground"): + continue + if name in clock_pins or name in reset_pins: + continue + + if direction == "inout": + raise ChipFlowError( + f"Macro '{macro_name}': pin '{name}' is inout and " + "cannot be auto-wrapped. Declare it explicitly as a " + "pin-style interface or omit it from the blackbox." + ) + if direction not in ("in", "out"): + raise ChipFlowError( + f"Macro '{macro_name}': pin '{name}' has unknown " + f"direction {direction!r}" + ) + + iface_cls = "In" if direction == "in" else "Out" + verilog_prefix = "i_" if direction == "in" else "o_" + ports[name] = Port( + interface=f"amaranth.lib.wiring.{iface_cls}({width})", + map=f"{verilog_prefix}{name}", + ) + return ports + + +class BlackboxWrapper(RTLWrapper): + """RTLWrapper subclass that also registers the macro with the platform. + + Shares all wrapper behaviour with :class:`RTLWrapper`; the only + difference is that :meth:`elaborate` informs the platform about the + macro's physical artifacts (LEF / Liberty / frame-view GDS) so the + submit step can bundle them. + """ + + def __init__( + self, + config: ExternalWrapConfig, + verilog_files: list[Path], + logical_name: str, + ): + super().__init__(config, verilog_files) + self._logical_name = logical_name + + def elaborate(self, platform): + if platform is not None and hasattr(platform, "add_macro"): + platform.add_macro(self._logical_name) + return super().elaborate(platform) + + +def load_blackbox_wrapper( + logical_name: str, + *, + clocks: Optional[Dict[str, str]] = None, + resets: Optional[Dict[str, str]] = None, +) -> BlackboxWrapper: + """Load a hard macro by logical name declared in ``chipflow.toml``. + + Args: + logical_name: Key under ``[chipflow.silicon.macros]``. + clocks: Amaranth clock-domain → macro pin name. e.g. 
+ ``{"sys": "CLK"}`` wires the ``sys`` domain's clock to the + macro's LEF pin ``CLK``. + resets: Amaranth clock-domain → macro reset pin name (active-low + convention, matching :class:`RTLWrapper`). + + Returns: + A :class:`BlackboxWrapper` (a :class:`wiring.Component`) whose + signature mirrors the macro's signal pins. Power/ground pins + are omitted; clock/reset pins are omitted from the signature + and wired at elaborate time. + + Raises: + ChipFlowError: if the macro isn't declared in ``chipflow.toml``, + its blackbox JSON is missing/malformed, or a referenced + clock/reset pin isn't in the macro's pin list. + """ + clocks = dict(clocks or {}) + resets = dict(resets or {}) + + # Parse chipflow.toml and locate the declared macro. + from ..config.parser import _parse_config + from ..utils import ensure_chipflow_root + + cfg = _parse_config() + if not cfg.chipflow.silicon: + raise ChipFlowError( + "load_blackbox_wrapper requires a [chipflow.silicon] section" + ) + macros_cfg = cfg.chipflow.silicon.macros + if logical_name not in macros_cfg: + raise ChipFlowError( + f"Macro '{logical_name}' is not declared in " + f"[chipflow.silicon.macros]. 
Known: " + f"{sorted(macros_cfg.keys()) or '(none)'}" + ) + + root = ensure_chipflow_root() + bb_path = Path(macros_cfg[logical_name].blackbox) + if not bb_path.is_absolute(): + bb_path = (root / bb_path).resolve() + + try: + bb = json.loads(bb_path.read_text()) + except FileNotFoundError: + raise ChipFlowError( + f"Macro '{logical_name}': blackbox JSON not found at {bb_path}" + ) + except json.JSONDecodeError as e: + raise ChipFlowError( + f"Macro '{logical_name}': invalid JSON in {bb_path}: {e}" + ) + + if bb.get("version") != "1": + raise ChipFlowError( + f"Macro '{logical_name}': unsupported blackbox JSON version " + f"{bb.get('version')!r} (expected '1')" + ) + + macro_cell_name = bb["name"] + pins = bb.get("pins", []) + pin_names = {p["name"] for p in pins} + + for domain, pin_name in clocks.items(): + if pin_name not in pin_names: + raise ChipFlowError( + f"Macro '{logical_name}': clock pin '{pin_name}' " + f"(domain '{domain}') not found. Pins: {sorted(pin_names)}" + ) + for domain, pin_name in resets.items(): + if pin_name not in pin_names: + raise ChipFlowError( + f"Macro '{logical_name}': reset pin '{pin_name}' " + f"(domain '{domain}') not found. Pins: {sorted(pin_names)}" + ) + + clock_pins = set(clocks.values()) + reset_pins = set(resets.values()) + + port_configs = _build_port_configs( + macro_cell_name, pins, clock_pins, reset_pins + ) + + # The Verilog stub is needed by Yosys so the black-box Instance has + # a proper port signature during synthesis. It's optional — some + # flows might pre-supply the stub another way — but when present, + # pass it through the regular add_file path at elaborate. 
+ verilog_files: list[Path] = [] + stub_rel = (bb.get("files") or {}).get("verilog_stub") + if stub_rel is not None: + stub_path = (bb_path.parent / stub_rel).resolve() + if not stub_path.exists(): + raise ChipFlowError( + f"Macro '{logical_name}': verilog_stub '{stub_rel}' " + f"(resolved to {stub_path}) not found" + ) + verilog_files.append(stub_path) + + # Files.path is required by the Pydantic model but the constructor + # path below doesn't use it for source discovery — we pass + # verilog_files explicitly. + config = ExternalWrapConfig( + name=macro_cell_name, + files=Files(path=bb_path.parent), + clocks=clocks, + resets=resets, + ports=port_configs, + ) + return BlackboxWrapper(config, verilog_files, logical_name) diff --git a/chipflow/rtl/wrapper.py b/chipflow/rtl/wrapper.py index 71500d7c..0da7ef60 100644 --- a/chipflow/rtl/wrapper.py +++ b/chipflow/rtl/wrapper.py @@ -903,14 +903,24 @@ def _validate_signal_bindings(self, verilog_ports: Dict[str, str]) -> None: # Track which Verilog ports are mapped mapped_ports: set[str] = set() + # Accept either the ``i_`` / ``o_`` form (the + # convention hand-written chipflow wrappers use, where the + # Verilog source itself declares ``input i_clk`` etc.) OR the + # bare ```` form (used by e.g. LEF-derived hard-macro + # stubs, where ports match the LEF pin names verbatim). + def _present(expected_prefixed: str, bare: str) -> bool: + return expected_prefixed in verilog_ports or bare in verilog_ports + # Validate clock signals for clock_name, verilog_signal in self._config.clocks.items(): expected_port = f"i_{verilog_signal}" mapped_ports.add(expected_port) - if expected_port not in verilog_ports: + mapped_ports.add(verilog_signal) + if not _present(expected_port, verilog_signal): raise ChipFlowError( f"[{self._config.name}] Clock signal '{verilog_signal}' " - f"(expecting port '{expected_port}') not found in Verilog module. " + f"(expecting port '{expected_port}' or '{verilog_signal}') " + f"not found in Verilog module. 
" f"Available ports: {sorted(verilog_ports.keys())}" ) @@ -918,16 +928,24 @@ def _validate_signal_bindings(self, verilog_ports: Dict[str, str]) -> None: for reset_name, verilog_signal in self._config.resets.items(): expected_port = f"i_{verilog_signal}" mapped_ports.add(expected_port) - if expected_port not in verilog_ports: + mapped_ports.add(verilog_signal) + if not _present(expected_port, verilog_signal): raise ChipFlowError( f"[{self._config.name}] Reset signal '{verilog_signal}' " - f"(expecting port '{expected_port}') not found in Verilog module. " + f"(expecting port '{expected_port}' or '{verilog_signal}') " + f"not found in Verilog module. " f"Available ports: {sorted(verilog_ports.keys())}" ) - # Collect all mapped port signals from the actual port mappings + # Collect all mapped port signals from the actual port mappings. + # Port-map values are Instance-kwarg names (``i_``/``o_``); + # record the bare names too so the unmapped-ports warning below + # doesn't false-positive on modules with bare port declarations. for port_name, port_map in self._port_mappings.items(): - mapped_ports.update(port_map.values()) + for mapped in port_map.values(): + mapped_ports.add(mapped) + if mapped.startswith(("i_", "o_")): + mapped_ports.add(mapped[2:]) # Warn about unmapped Verilog ports (excluding clk/rst which are handled specially) unmapped = set(verilog_ports.keys()) - mapped_ports diff --git a/docs/chipflow-toml-guide.rst b/docs/chipflow-toml-guide.rst index ad6513cf..65df1eef 100644 --- a/docs/chipflow-toml-guide.rst +++ b/docs/chipflow-toml-guide.rst @@ -153,6 +153,31 @@ The form of IC packaging to use +``[chipflow.silicon.macros]`` +============================= + +Optional. Declares third-party hard macros (SRAMs, PLLs, analog IP, vendor +cores) for inclusion in the build. Each entry points at a +``*.blackbox.json`` produced by `macrostrip +`__ (or any conformant tool): + +.. 
code-block:: TOML + + [chipflow.silicon.macros.sram_64x64] + blackbox = "vendor/ihp/sram_64x64.blackbox.json" + + [chipflow.silicon.macros.pll_core] + blackbox = "vendor/pll/pll_core.blackbox.json" + +Paths are resolved relative to ``CHIPFLOW_ROOT``. The blackbox JSON itself +points at the macro's companion files (LEF, Liberty, frame-view or real GDS, +Verilog stub), which are all bundled under ``macros/`` inside the submission +``bundle.zip`` at submit time. + +Python code instantiates macros by logical name via +:py:func:`chipflow.rtl.blackbox.load_blackbox_wrapper`; see +:doc:`rtl-wrapper` for the usage pattern and the NDA vs. non-NDA workflows. + + Power connections ----------------- diff --git a/docs/rtl-wrapper.rst b/docs/rtl-wrapper.rst index 683c1a67..5f9781b5 100644 --- a/docs/rtl-wrapper.rst +++ b/docs/rtl-wrapper.rst @@ -52,6 +52,60 @@ The merged parameters are emitted as ``p_=`` kwargs on the substitution (so SpinalHDL / sv2v / yosys-slang see the final values when producing Verilog). +Wrapping an NDA hard macro +-------------------------- + +For third-party / NDA hard macros shipped as a LEF + Liberty + Verilog stub, +use :py:func:`chipflow.rtl.blackbox.load_blackbox_wrapper`. The macro is +declared in ``chipflow.toml`` by logical name, pointing at a +``*.blackbox.json`` produced by `macrostrip +`__: + +.. code-block:: toml + + # chipflow.toml + [chipflow.silicon.macros.sram_64x64] + blackbox = "vendor/ihp/sram_64x64.blackbox.json" + +.. code-block:: python + + from chipflow.rtl import load_blackbox_wrapper + + sram = load_blackbox_wrapper( + "sram_64x64", + clocks={"sys": "CLK"}, + resets={"sys": "RST_N"}, + ) + m.submodules.sram = sram + +Signal pins become signature members (``In(width)`` / ``Out(width)``); power, +ground, clock, and reset pins are handled out-of-band. 
At submit time the +platform bundles the macro's companion files (LEF, Liberty, frame-view GDS, +Verilog stub, blackbox JSON) under ``macros/`` inside the submission +``bundle.zip`` alongside the RTLIL, so +the ChipFlow backend can feed them to ORFS without the real macro layout ever +leaving customer premises. + +Non-NDA macros +~~~~~~~~~~~~~~ + +The same mechanism works for macros you're free to ship in full — no NDA, no +stripping. Point ``macrostrip blackbox`` at the *real* GDS (rather than +running ``macrostrip frame`` first): + +.. code-block:: bash + + macrostrip blackbox \ + --lef macro.lef --top MY_MACRO \ + --frame-gds macro.real.gds \ + --liberty macro.lib \ + --verilog-stub macro.v \ + -o macro.blackbox.json + +Declare and instantiate it exactly as above. The blackbox JSON schema field +is named ``frame_gds`` for historical reasons, but chipflow-lib treats it as +"the GDS to include" — frame-view or real, the submission path is identical. +Skip ``macrostrip swap`` on return: there's nothing to substitute back. + API --- @@ -61,3 +61,7 @@ API :py:class:`~chipflow.rtl.wrapper.RTLWrapper`. - :py:class:`chipflow.rtl.wrapper.ExternalWrapConfig` — Pydantic schema for the TOML configuration. +- :py:func:`chipflow.rtl.blackbox.load_blackbox_wrapper` — loader for hard + macros declared in ``[chipflow.silicon.macros]``. +- :py:class:`chipflow.rtl.blackbox.BlackboxWrapper` — the generated component + for a hard macro; subclass of :py:class:`~chipflow.rtl.wrapper.RTLWrapper`. 
diff --git a/tests/test_blackbox_wrapper.py b/tests/test_blackbox_wrapper.py new file mode 100644 index 00000000..fd632d2c --- /dev/null +++ b/tests/test_blackbox_wrapper.py @@ -0,0 +1,282 @@ +# SPDX-License-Identifier: BSD-2-Clause +"""Tests for chipflow.rtl.blackbox (load_blackbox_wrapper + bundle layout).""" + +from __future__ import annotations + +import io +import json +import os +import tempfile +import unittest +import zipfile +from pathlib import Path +from unittest import mock + +from chipflow import ChipFlowError +from chipflow.rtl.blackbox import BlackboxWrapper, load_blackbox_wrapper + + +def _minimal_blackbox( + name: str = "MACRO", + pins: list[dict] | None = None, + files: dict[str, str] | None = None, +) -> dict: + if pins is None: + pins = [ + {"name": "CLK", "direction": "in", "width": 1, + "msb": 0, "lsb": 0, "role": "clock"}, + {"name": "RST_N", "direction": "in", "width": 1, + "msb": 0, "lsb": 0, "role": "signal"}, + {"name": "DIN", "direction": "in", "width": 8, + "msb": 7, "lsb": 0, "role": "signal"}, + {"name": "DOUT", "direction": "out", "width": 8, + "msb": 7, "lsb": 0, "role": "signal"}, + {"name": "VDD", "direction": "inout", "width": 1, + "msb": 0, "lsb": 0, "role": "power"}, + {"name": "VSS", "direction": "inout", "width": 1, + "msb": 0, "lsb": 0, "role": "ground"}, + ] + bb: dict = { + "version": "1", + "name": name, + "boundary": {"width": 100.0, "height": 50.0}, + "pins": pins, + } + if files is not None: + bb["files"] = files + return bb + + +class _ProjectFixture: + """Creates a self-contained chipflow.toml + blackbox tree on disk.""" + + def __init__(self, tmpdir: Path, macros_section: str, blackboxes: dict[str, dict], + stubs: dict[str, str] | None = None, + extras: dict[str, dict[str, bytes]] | None = None): + self.root = tmpdir + (tmpdir / "chipflow.toml").write_text( + '[chipflow]\n' + 'project_name = "test"\n' + '\n' + '[chipflow.silicon]\n' + 'process = "sky130"\n' + 'package = "caravel"\n' + f'{macros_section}' + ) + 
self.blackbox_paths: dict[str, Path] = {} + for rel_path, bb in blackboxes.items(): + full = tmpdir / rel_path + full.parent.mkdir(parents=True, exist_ok=True) + full.write_text(json.dumps(bb)) + self.blackbox_paths[rel_path] = full + for stub_rel, content in (stubs or {}).items(): + full = tmpdir / stub_rel + full.parent.mkdir(parents=True, exist_ok=True) + full.write_text(content) + for rel_dir, files in (extras or {}).items(): + base = tmpdir / rel_dir + base.mkdir(parents=True, exist_ok=True) + for fname, content in files.items(): + (base / fname).write_bytes(content) + + +class LoadBlackboxWrapperTestCase(unittest.TestCase): + def setUp(self): + self.tmpdir = Path(tempfile.mkdtemp()) + # Reset the cached CHIPFLOW_ROOT between tests. + from chipflow.utils import ensure_chipflow_root + if hasattr(ensure_chipflow_root, "root"): + delattr(ensure_chipflow_root, "root") + self._prev_root = os.environ.get("CHIPFLOW_ROOT") + os.environ["CHIPFLOW_ROOT"] = str(self.tmpdir) + + def tearDown(self): + import shutil + shutil.rmtree(self.tmpdir, ignore_errors=True) + if self._prev_root is None: + os.environ.pop("CHIPFLOW_ROOT", None) + else: + os.environ["CHIPFLOW_ROOT"] = self._prev_root + from chipflow.utils import ensure_chipflow_root + if hasattr(ensure_chipflow_root, "root"): + delattr(ensure_chipflow_root, "root") + + def _fixture(self, **kwargs): + return _ProjectFixture(self.tmpdir, **kwargs) + + def test_signature_skips_power_ground_clock_reset(self): + stub = ( + "module MACRO(input CLK, input RST_N, " + "input [7:0] DIN, output [7:0] DOUT); endmodule\n" + ) + self._fixture( + macros_section=( + '[chipflow.silicon.macros.m1]\n' + 'blackbox = "vendor/m1.blackbox.json"\n' + ), + blackboxes={ + "vendor/m1.blackbox.json": _minimal_blackbox( + files={"verilog_stub": "m1.v"}, + ), + }, + stubs={"vendor/m1.v": stub}, + ) + + w = load_blackbox_wrapper( + "m1", clocks={"sys": "CLK"}, resets={"sys": "RST_N"}, + ) + members = set(w.signature.members.keys()) + # Power/ground 
dropped, clock/reset dropped, signals kept + self.assertEqual(members, {"DIN", "DOUT"}) + self.assertIsInstance(w, BlackboxWrapper) + + def test_unknown_macro_errors(self): + self._fixture( + macros_section='', + blackboxes={}, + ) + with self.assertRaisesRegex(ChipFlowError, "not declared"): + load_blackbox_wrapper("ghost") + + def test_missing_clock_pin_errors(self): + self._fixture( + macros_section=( + '[chipflow.silicon.macros.m1]\n' + 'blackbox = "m1.blackbox.json"\n' + ), + blackboxes={"m1.blackbox.json": _minimal_blackbox()}, + ) + with self.assertRaisesRegex(ChipFlowError, "NOTAPIN"): + load_blackbox_wrapper("m1", clocks={"sys": "NOTAPIN"}) + + def test_unsupported_version_errors(self): + bb = _minimal_blackbox() + bb["version"] = "99" + self._fixture( + macros_section=( + '[chipflow.silicon.macros.m1]\n' + 'blackbox = "m1.blackbox.json"\n' + ), + blackboxes={"m1.blackbox.json": bb}, + ) + with self.assertRaisesRegex(ChipFlowError, "version"): + load_blackbox_wrapper("m1") + + def test_inout_signal_pin_rejected(self): + pins = [ + {"name": "DATA", "direction": "inout", "width": 1, + "msb": 0, "lsb": 0, "role": "signal"}, + ] + self._fixture( + macros_section=( + '[chipflow.silicon.macros.m1]\n' + 'blackbox = "m1.blackbox.json"\n' + ), + blackboxes={"m1.blackbox.json": _minimal_blackbox(pins=pins)}, + ) + with self.assertRaisesRegex(ChipFlowError, "inout"): + load_blackbox_wrapper("m1") + + def test_elaborate_calls_add_macro(self): + self._fixture( + macros_section=( + '[chipflow.silicon.macros.m1]\n' + 'blackbox = "m1.blackbox.json"\n' + ), + blackboxes={"m1.blackbox.json": _minimal_blackbox()}, + ) + w = load_blackbox_wrapper( + "m1", clocks={"sys": "CLK"}, resets={"sys": "RST_N"}, + ) + platform = mock.MagicMock() + w.elaborate(platform) + platform.add_macro.assert_called_once_with("m1") + + +class BuildBundleWithMacrosTestCase(unittest.TestCase): + """`_build_bundle_zip` packs registered macros into the same zip and + surfaces them in the manifest under 
`macros//` with + `_file`-suffixed paths (which the chipflow-backend's recursive + extractor walks for).""" + + def setUp(self): + self.tmpdir = Path(tempfile.mkdtemp()) + self.rtlil = self.tmpdir / "top.il" + self.rtlil.write_text("module top(); endmodule\n") + + def tearDown(self): + import shutil + shutil.rmtree(self.tmpdir, ignore_errors=True) + + def _entry(self, logical: str, cell: str) -> dict: + subdir = self.tmpdir / logical + subdir.mkdir(parents=True, exist_ok=True) + lef = subdir / f"{cell}.lef" + lef.write_text(f"# LEF for {cell}\n") + gds = subdir / f"{cell}.gds" + gds.write_bytes(b"\x00\x01BINARY\xff") + bb_path = subdir / f"{cell}.blackbox.json" + bb_path.write_text('{"version": "1"}') + return { + "logical_name": logical, + "name": cell, + "blackbox_json": bb_path, + "blackbox": {"version": "1"}, + "files": {"lef": lef, "frame_gds": gds}, + } + + def test_no_macros_omits_macros_key(self): + from chipflow.platform.silicon_step import _build_bundle_zip + blob = _build_bundle_zip( + self.rtlil, "{}", "p", "sky130", "cf20", macros=None) + with zipfile.ZipFile(io.BytesIO(blob)) as zf: + manifest = json.loads(zf.read("manifest.json")) + self.assertNotIn("macros", manifest) + + def test_macros_packed_under_macros_subfolder(self): + from chipflow.platform.silicon_step import _build_bundle_zip + macros = { + "sram": self._entry("sram", "SRAM_64X64"), + "pll": self._entry("pll", "PLL_CORE"), + } + blob = _build_bundle_zip( + self.rtlil, '{"pins": []}', "myproj", "ihp_sg13g2", "pga144", + macros=macros) + + with zipfile.ZipFile(io.BytesIO(blob)) as zf: + names = set(zf.namelist()) + self.assertIn("manifest.json", names) + self.assertIn("top.il", names) + self.assertIn("pins.lock", names) + self.assertIn("macros/sram/SRAM_64X64.lef", names) + self.assertIn("macros/sram/SRAM_64X64.gds", names) + self.assertIn("macros/sram/SRAM_64X64.blackbox.json", names) + self.assertIn("macros/pll/PLL_CORE.lef", names) + + manifest = json.loads(zf.read("manifest.json")) + 
self.assertEqual(manifest["version"], "1") + self.assertEqual(manifest["project"], "myproj") + self.assertEqual(manifest["process"], "ihp_sg13g2") + self.assertEqual(manifest["package"], "pga144") + self.assertEqual(manifest["design_file"], "top.il") + self.assertEqual(manifest["pins_lock_file"], "pins.lock") + + mac = manifest["macros"] + self.assertEqual(set(mac.keys()), {"sram", "pll"}) + self.assertEqual(mac["sram"]["name"], "SRAM_64X64") + self.assertEqual(mac["sram"]["lef_file"], "macros/sram/SRAM_64X64.lef") + self.assertEqual(mac["sram"]["frame_gds_file"], "macros/sram/SRAM_64X64.gds") + self.assertEqual( + mac["sram"]["blackbox_json_file"], + "macros/sram/SRAM_64X64.blackbox.json", + ) + # All macro file-pointing keys carry _file suffix so backend's + # `_bundle_files_from_manifest` extractor finds them. + for key in mac["sram"]: + if key == "name": + continue + self.assertTrue(key.endswith("_file"), f"{key} must end in _file") + + +if __name__ == "__main__": + unittest.main()