parent 72b982d133
commit c178a8fbb2

@@ -0,0 +1,2 @@
__pycache__/
@@ -0,0 +1,26 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

.PHONY: all
all: lint

# We need a directory to build stuff and use the "util/reggen" namespace
# in the top-level build-bin directory.
repo-top := ../../../..
build-dir := $(repo-top)/build-bin/util/reggen

$(build-dir):
	mkdir -p $@

# Reggen code that doesn't yet have typing annotations
mypy-excls := gen_json.py gen_selfdoc.py

py-files := $(filter-out $(mypy-excls),$(wildcard *.py))

$(build-dir)/mypy.stamp: $(py-files) | $(build-dir)
	mypy --strict $^
	touch $@

.PHONY: lint
lint: $(build-dir)/mypy.stamp

@@ -0,0 +1,113 @@
# Register generator `reggen` and `regtool`

The utility script `regtool.py` and collateral under `reggen` are Python
tools to read register descriptions in Hjson and generate various output
formats. The tool can output HTML documentation, standard JSON, compact
standard JSON (whitespace removed) and Hjson. The example commands assume
`$REPO_TOP` is set to the toplevel directory of the repository.
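
For orientation, a register description is an Hjson object with some
block-level information and a list of registers. The sketch below is
illustrative only (the block and field names are made up); see the register
tool documentation linked below for the authoritative schema:

```hjson
{
  name: "gpio_example",
  clock_primary: "clk_i",
  bus_interfaces: [
    { protocol: "tlul", direction: "device" }
  ],
  registers: [
    { name: "CTRL",
      desc: "Example control register",
      swaccess: "rw",
      hwaccess: "hro",
      fields: [
        { bits: "0", name: "EN", desc: "Enable the block" }
      ]
    }
  ]
}
```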

### Setup

If packages have not previously been installed you will need to set a
few things up. First use `pip3` to install some required packages:

```console
$ pip3 install --user hjson
$ pip3 install --user mistletoe
$ pip3 install --user mako
```

### Register JSON Format

For details on the register JSON format, see the
[register tool documentation]({{< relref "doc/rm/register_tool/index.md" >}}).
To ensure things stay up to date, the register JSON format information
is documented by the tool itself.
The documentation can be generated by running the following commands:

```console
$ cd $REPO_TOP/util
$ ./build_docs.py
```
Under the hood, the `build_docs.py` tool will automatically use the `reggen`
tool to produce Markdown and process that into HTML.

### Examples using standalone regtool

Normally for documentation the `build_docs.py` tool will automatically
use `reggen`. The script `regtool.py` provides a standalone way to run
`reggen`. See the
[register tool documentation]({{< relref "doc/rm/register_tool/index.md" >}})
for details about how to invoke the tool.

The following shows an example of how to generate RTL from a register
description:

```console
$ cd $REPO_TOP/util
$ mkdir /tmp/rtl
$ ./regtool.py -r -t /tmp/rtl ../hw/ip/uart/data/uart.hjson
$ ls /tmp/rtl
uart_reg_pkg.sv  uart_reg_top.sv
```

The following shows an example of how to generate a DV UVM class from
a register description:

```console
$ cd $REPO_TOP/util
$ mkdir /tmp/dv
$ ./regtool.py -s -t /tmp/dv ../hw/ip/uart/data/uart.hjson
$ ls /tmp/dv
uart_ral_pkg.sv
```

By default, the generated block, register and field models are derived from
`dv_base_reg` classes provided at `hw/dv/sv/dv_base_reg`. If required, the user
can supply the `--dv-base-prefix my_base` switch to have the models derive from
custom, user-defined RAL classes instead:

```console
$ cd $REPO_TOP/util
$ mkdir /tmp/dv
$ ./regtool.py -s -t /tmp/dv ../hw/ip/uart/data/uart.hjson \
    --dv-base-prefix my_base
$ ls /tmp/dv
uart_ral_pkg.sv
```

This makes the following assumptions:
- A FuseSoC core file aggregating the `my_base` RAL classes with the VLNV
  name `lowrisc:dv:my_base_reg` is provided in the cores search path.
- These custom classes are derived from the corresponding `dv_base_reg` classes
  and have the following names:
  - `my_base_reg_pkg.sv`: The RAL package that includes the below sources
  - `my_base_reg_block.sv`: The register block abstraction
  - `my_base_reg.sv`: The register abstraction
  - `my_base_reg_field.sv`: The register field abstraction
  - `my_base_mem.sv`: The memory abstraction
- If any of the above class specializations is not needed, it can be
  `typedef`'ed in `my_base_reg_pkg`:
  ```systemverilog
  package my_base_reg_pkg;
    import dv_base_reg_pkg::*;
    typedef dv_base_reg_field my_base_reg_field;
    typedef dv_base_mem my_base_mem;
    `include "my_base_reg.sv"
    `include "my_base_reg_block.sv"
  endpackage
  ```

The following shows an example of how to generate an FPV CSR read/write
assertion module from a register description:

```console
$ cd $REPO_TOP/util
$ mkdir -p /tmp/fpv/vip
$ ./regtool.py -f -t /tmp/fpv/vip ../hw/ip/uart/data/uart.hjson
$ ls /tmp/fpv/vip
uart_csr_assert_fpv.sv
```

If the target directory is not specified, the tool creates the DV file
under the `hw/ip/{module}/dv/` directory.

@@ -0,0 +1,128 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Enumerated types for fields

Generated by validation, used by backends
"""

from enum import Enum

from .lib import check_str


class JsonEnum(Enum):
    def for_json(x) -> str:
        return str(x)


class SwWrAccess(JsonEnum):
    WR = 1
    NONE = 2


class SwRdAccess(JsonEnum):
    RD = 1
    RC = 2  # Special handling for port
    NONE = 3


class SwAccess(JsonEnum):
    RO = 1
    RW = 2
    WO = 3
    W1C = 4
    W1S = 5
    W0C = 6
    RC = 7
    R0W1C = 8
    NONE = 9


class HwAccess(JsonEnum):
    HRO = 1
    HRW = 2
    HWO = 3
    NONE = 4  # No access allowed


# swaccess permitted values
# text description, access enum, wr access enum, rd access enum, ok in window
SWACCESS_PERMITTED = {
    'none':  ("No access",                       # noqa: E241
              SwAccess.NONE, SwWrAccess.NONE, SwRdAccess.NONE, False),  # noqa: E241
    'ro':    ("Read Only",                       # noqa: E241
              SwAccess.RO, SwWrAccess.NONE, SwRdAccess.RD, True),       # noqa: E241
    'rc':    ("Read Only, reading clears",       # noqa: E241
              SwAccess.RC, SwWrAccess.WR, SwRdAccess.RC, False),        # noqa: E241
    'rw':    ("Read/Write",                      # noqa: E241
              SwAccess.RW, SwWrAccess.WR, SwRdAccess.RD, True),         # noqa: E241
    'r0w1c': ("Read zero, Write with 1 clears",  # noqa: E241
              SwAccess.W1C, SwWrAccess.WR, SwRdAccess.NONE, False),     # noqa: E241
    'rw1s':  ("Read, Write with 1 sets",         # noqa: E241
              SwAccess.W1S, SwWrAccess.WR, SwRdAccess.RD, False),       # noqa: E241
    'rw1c':  ("Read, Write with 1 clears",       # noqa: E241
              SwAccess.W1C, SwWrAccess.WR, SwRdAccess.RD, False),       # noqa: E241
    'rw0c':  ("Read, Write with 0 clears",       # noqa: E241
              SwAccess.W0C, SwWrAccess.WR, SwRdAccess.RD, False),       # noqa: E241
    'wo':    ("Write Only",                      # noqa: E241
              SwAccess.WO, SwWrAccess.WR, SwRdAccess.NONE, True)        # noqa: E241
}

# hwaccess permitted values
HWACCESS_PERMITTED = {
    'hro': ("Read Only", HwAccess.HRO),
    'hrw': ("Read/Write", HwAccess.HRW),
    'hwo': ("Write Only", HwAccess.HWO),
    'none': ("No Access Needed", HwAccess.NONE)
}


class SWAccess:
    def __init__(self, where: str, raw: object):
        self.key = check_str(raw, 'swaccess for {}'.format(where))
        try:
            self.value = SWACCESS_PERMITTED[self.key]
        except KeyError:
            raise ValueError('Unknown swaccess key, {}, for {}.'
                             .format(self.key, where)) from None

    def dv_rights(self) -> str:
        '''Return a UVM access string as used by uvm_reg_field::set_access().'''
        if self.key == 'r0w1c':
            return 'W1C'
        return self.value[1].name

    def swrd(self) -> SwRdAccess:
        return self.value[3]

    def allows_read(self) -> bool:
        return self.value[3] != SwRdAccess.NONE

    def allows_write(self) -> bool:
        return self.value[2] == SwWrAccess.WR

    def needs_we(self) -> bool:
        '''Should the register for this field have a write-enable signal?

        This is almost the same as allows_write(), but doesn't return true for
        RC registers, which should use a read-enable signal (connected to their
        prim_subreg's we port).

        '''
        return self.value[1] != SwAccess.RC and self.allows_write()
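
    # Illustrative usage sketch (not part of the original module; the field
    # name below is made up). Given the table above, a 'rw1c' field maps to:
    #
    #     acc = SWAccess('field "status"', 'rw1c')
    #     acc.dv_rights()     # -> 'W1C'
    #     acc.allows_read()   # -> True   (SwRdAccess.RD)
    #     acc.allows_write()  # -> True   (SwWrAccess.WR)
    #     acc.needs_we()      # -> True   (not an RC field)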


class HWAccess:
    def __init__(self, where: str, raw: object):
        self.key = check_str(raw, 'hwaccess for {}'.format(where))
        try:
            self.value = HWACCESS_PERMITTED[self.key]
        except KeyError:
            raise ValueError('Unknown hwaccess key, {}, for {}.'
                             .format(self.key, where)) from None

    def allows_read(self) -> bool:
        return self.key in ['hro', 'hrw']

    def allows_write(self) -> bool:
        return self.key in ['hrw', 'hwo']

@@ -0,0 +1,54 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

from typing import Dict, List

from .bits import Bits
from .signal import Signal
from .lib import check_keys, check_name, check_str, check_list


class Alert(Signal):
    def __init__(self, name: str, desc: str, bit: int, fatal: bool):
        super().__init__(name, desc, Bits(bit, bit))
        self.bit = bit
        self.fatal = fatal

    @staticmethod
    def from_raw(what: str,
                 lsb: int,
                 raw: object) -> 'Alert':
        rd = check_keys(raw, what, ['name', 'desc'], [])

        name = check_name(rd['name'], 'name field of ' + what)
        desc = check_str(rd['desc'], 'desc field of ' + what)

        # Make sense of the alert name, which should be prefixed with recov_ or
        # fatal_.
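        # For example (names here are purely illustrative), "fatal_fault"
        # would give fatal=True and "recov_overflow" would give fatal=False.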
        pfx = name.split('_')[0]
        if pfx == 'recov':
            fatal = False
        elif pfx == 'fatal':
            fatal = True
        else:
            raise ValueError('Invalid name field of {}: alert names must be '
                             'prefixed with "recov_" or "fatal_". Saw {!r}.'
                             .format(what, name))

        return Alert(name, desc, lsb, fatal)

    @staticmethod
    def from_raw_list(what: str, raw: object) -> List['Alert']:
        ret = []
        for idx, entry in enumerate(check_list(raw, what)):
            entry_what = 'entry {} of {}'.format(idx, what)
            alert = Alert.from_raw(entry_what, idx, entry)
            ret.append(alert)
        return ret

    def _asdict(self) -> Dict[str, object]:
        return {
            'name': self.name,
            'desc': self.desc,
        }

@@ -0,0 +1,87 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

'''Support code for bit ranges in reggen'''

from typing import Tuple

from .lib import check_str
from .params import ReggenParams


class Bits:
    def __init__(self, msb: int, lsb: int):
        assert 0 <= lsb <= msb
        self.msb = msb
        self.lsb = lsb

    def bitmask(self) -> int:
        return (1 << (self.msb + 1)) - (1 << self.lsb)

    def width(self) -> int:
        return 1 + self.msb - self.lsb

    def max_value(self) -> int:
        return (1 << self.width()) - 1

    def extract_field(self, reg_val: int) -> int:
        return (reg_val & self.bitmask()) >> self.lsb
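
    # Worked example (added for illustration, not in the original file):
    # for Bits(msb=7, lsb=4):
    #   bitmask()           == 0xf0
    #   width()             == 4
    #   max_value()         == 0xf
    #   extract_field(0xab) == 0xa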

    @staticmethod
    def from_raw(where: str,
                 reg_width: int,
                 params: ReggenParams,
                 raw: object) -> 'Bits':
        # Bits should be specified as msb:lsb or as just a single bit index.
        if isinstance(raw, int):
            msb = raw
            lsb = raw
        else:
            str_val = check_str(raw, 'bits field for {}'.format(where))
            msb, lsb = Bits._parse_str(where, params, str_val)

        # Check that the bit indices look sensible
        if msb < lsb:
            raise ValueError('msb for {} is {}: less than {}, the lsb.'
                             .format(where, msb, lsb))
        if lsb < 0:
            raise ValueError('lsb for {} is {}, which is negative.'
                             .format(where, lsb))
        if msb >= reg_width:
            raise ValueError("msb for {} is {}, which doesn't fit in {} bits."
                             .format(where, msb, reg_width))

        return Bits(msb, lsb)

    @staticmethod
    def _parse_str(where: str,
                   params: ReggenParams,
                   str_val: str) -> Tuple[int, int]:
        try:
            idx = int(str_val)
            return (idx, idx)
        except ValueError:
            # Doesn't look like an integer. Never mind: try msb:lsb
            pass

        parts = str_val.split(':')
        if len(parts) != 2:
            raise ValueError('bits field for {} is not an '
                             'integer or of the form msb:lsb. Saw {!r}.'
                             .format(where, str_val))
        return (params.expand(parts[0],
                              'msb of bits field for {}'.format(where)),
                params.expand(parts[1],
                              'lsb of bits field for {}'.format(where)))

    def make_translated(self, bit_offset: int) -> 'Bits':
        assert 0 <= bit_offset
        return Bits(self.msb + bit_offset, self.lsb + bit_offset)

    def as_str(self) -> str:
        if self.lsb == self.msb:
            return str(self.lsb)
        else:
            assert self.lsb < self.msb
            return '{}:{}'.format(self.msb, self.lsb)

@@ -0,0 +1,176 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

'''Code representing a list of bus interfaces for a block'''

from typing import Dict, List, Optional, Tuple

from .inter_signal import InterSignal
from .lib import check_list, check_keys, check_str, check_optional_str


class BusInterfaces:
    def __init__(self,
                 has_unnamed_host: bool,
                 named_hosts: List[str],
                 has_unnamed_device: bool,
                 named_devices: List[str]):
        assert has_unnamed_device or named_devices
        assert len(named_hosts) == len(set(named_hosts))
        assert len(named_devices) == len(set(named_devices))

        self.has_unnamed_host = has_unnamed_host
        self.named_hosts = named_hosts
        self.has_unnamed_device = has_unnamed_device
        self.named_devices = named_devices

    @staticmethod
    def from_raw(raw: object, where: str) -> 'BusInterfaces':
        has_unnamed_host = False
        named_hosts = []

        has_unnamed_device = False
        named_devices = []

        for idx, raw_entry in enumerate(check_list(raw, where)):
            entry_what = 'entry {} of {}'.format(idx + 1, where)
            ed = check_keys(raw_entry, entry_what,
                            ['protocol', 'direction'],
                            ['name'])

            protocol = check_str(ed['protocol'],
                                 'protocol field of ' + entry_what)
            if protocol != 'tlul':
                raise ValueError('Unknown protocol {!r} at {}'
                                 .format(protocol, entry_what))

            direction = check_str(ed['direction'],
                                  'direction field of ' + entry_what)
            if direction not in ['device', 'host']:
                raise ValueError('Unknown interface direction {!r} at {}'
                                 .format(direction, entry_what))

            name = check_optional_str(ed.get('name'),
                                      'name field of ' + entry_what)

            if direction == 'host':
                if name is None:
                    if has_unnamed_host:
                        raise ValueError('Multiple un-named host '
                                         'interfaces at {}'
                                         .format(where))
                    has_unnamed_host = True
                else:
                    if name in named_hosts:
                        raise ValueError('Duplicate host interface '
                                         'with name {!r} at {}'
                                         .format(name, where))
                    named_hosts.append(name)
            else:
                if name is None:
                    if has_unnamed_device:
                        raise ValueError('Multiple un-named device '
                                         'interfaces at {}'
                                         .format(where))
                    has_unnamed_device = True
                else:
                    if name in named_devices:
                        raise ValueError('Duplicate device interface '
                                         'with name {!r} at {}'
                                         .format(name, where))
                    named_devices.append(name)

        if not (has_unnamed_device or named_devices):
            raise ValueError('No device interface at ' + where)

        return BusInterfaces(has_unnamed_host, named_hosts,
                             has_unnamed_device, named_devices)
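
    # Example of the raw input this parses (a sketch; the interface name is
    # made up):
    #
    #   bus_interfaces: [
    #     { protocol: "tlul", direction: "device" },
    #     { protocol: "tlul", direction: "host", name: "corei" }
    #   ]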

    def has_host(self) -> bool:
        return bool(self.has_unnamed_host or self.named_hosts)

    def _interfaces(self) -> List[Tuple[bool, Optional[str]]]:
        ret = []  # type: List[Tuple[bool, Optional[str]]]
        if self.has_unnamed_host:
            ret.append((True, None))
        for name in self.named_hosts:
            ret.append((True, name))

        if self.has_unnamed_device:
            ret.append((False, None))
        for name in self.named_devices:
            ret.append((False, name))

        return ret

    @staticmethod
    def _if_dict(is_host: bool, name: Optional[str]) -> Dict[str, object]:
        ret = {
            'protocol': 'tlul',
            'direction': 'host' if is_host else 'device'
        }  # type: Dict[str, object]

        if name is not None:
            ret['name'] = name

        return ret

    def as_dicts(self) -> List[Dict[str, object]]:
        return [BusInterfaces._if_dict(is_host, name)
                for is_host, name in self._interfaces()]

    def get_port_name(self, is_host: bool, name: Optional[str]) -> str:
        if is_host:
            tl_suffix = 'tl_h'
        else:
            tl_suffix = 'tl_d' if self.has_host() else 'tl'

        return (tl_suffix if name is None
                else '{}_{}'.format(name, tl_suffix))
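
    # Port-name examples (illustrative): an unnamed device interface on a
    # block with no host interface gets 'tl'; a device interface named 'regs'
    # on a block that also has a host gets 'regs_tl_d'; an unnamed host
    # interface gets 'tl_h'.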

    def get_port_names(self, inc_hosts: bool, inc_devices: bool) -> List[str]:
        ret = []
        for is_host, name in self._interfaces():
            if not (inc_hosts if is_host else inc_devices):
                continue
            ret.append(self.get_port_name(is_host, name))
        return ret

    def _if_inter_signal(self,
                         is_host: bool,
                         name: Optional[str]) -> InterSignal:
        act = 'req' if is_host else 'rsp'
        return InterSignal(self.get_port_name(is_host, name),
                           None, 'tl', 'tlul_pkg', 'req_rsp', act, 1, None)

    def inter_signals(self) -> List[InterSignal]:
        return [self._if_inter_signal(is_host, name)
                for is_host, name in self._interfaces()]

    def has_interface(self, is_host: bool, name: Optional[str]) -> bool:
        if is_host:
            if name is None:
                return self.has_unnamed_host
            else:
                return name in self.named_hosts
        else:
            if name is None:
                return self.has_unnamed_device
            else:
                return name in self.named_devices

    def find_port_name(self, is_host: bool, name: Optional[str]) -> str:
        '''Look up the given host/name pair and return its port name.

        Raises a KeyError if there is no match.

        '''
        if not self.has_interface(is_host, name):
            called = ('with no name'
                      if name is None else 'called {!r}'.format(name))
            raise KeyError('There is no {} bus interface {}.'
                           .format('host' if is_host else 'device',
                                   called))

        return self.get_port_name(is_host, name)

@@ -0,0 +1,94 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

'''Code representing clocking or resets for an IP block'''

from typing import Dict, List, Optional

from .lib import check_keys, check_list, check_bool, check_optional_name


class ClockingItem:
    def __init__(self, clock: Optional[str], reset: Optional[str], primary: bool):
        if primary:
            assert clock is not None
            assert reset is not None

        self.clock = clock
        self.reset = reset
        self.primary = primary

    @staticmethod
    def from_raw(raw: object, only_item: bool, where: str) -> 'ClockingItem':
        what = f'clocking item at {where}'
        rd = check_keys(raw, what, [], ['clock', 'reset', 'primary'])

        clock = check_optional_name(rd.get('clock'), 'clock field of ' + what)
        reset = check_optional_name(rd.get('reset'), 'reset field of ' + what)
        primary = check_bool(rd.get('primary', only_item),
                             'primary field of ' + what)

        if primary:
            if clock is None:
                raise ValueError(f'No clock signal for primary {what}.')
            if reset is None:
                raise ValueError(f'No reset signal for primary {what}.')

        return ClockingItem(clock, reset, primary)

    def _asdict(self) -> Dict[str, object]:
        ret = {}  # type: Dict[str, object]
        if self.clock is not None:
            ret['clock'] = self.clock
        if self.reset is not None:
            ret['reset'] = self.reset

        ret['primary'] = self.primary
        return ret


class Clocking:
    def __init__(self, items: List[ClockingItem], primary: ClockingItem):
        assert items
        self.items = items
        self.primary = primary

    @staticmethod
    def from_raw(raw: object, where: str) -> 'Clocking':
        what = f'clocking items at {where}'
        raw_items = check_list(raw, what)
        if not raw_items:
            raise ValueError(f'Empty list of clocking items at {where}.')

        just_one_item = len(raw_items) == 1

        items = []
        primaries = []
        for idx, raw_item in enumerate(raw_items):
            item_where = f'entry {idx} of {what}'
            item = ClockingItem.from_raw(raw_item, just_one_item, item_where)
            if item.primary:
                primaries.append(item)
            items.append(item)

        if len(primaries) != 1:
            raise ValueError('There should be exactly one primary clocking '
                             f'item at {where}, but we saw {len(primaries)}.')

        return Clocking(items, primaries[0])
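
    # Example of the raw input this parses (a sketch; the clock and reset
    # names are illustrative). With more than one entry, exactly one must be
    # marked primary:
    #
    #   clocking: [
    #     { clock: "clk_i", reset: "rst_ni", primary: true },
    #     { clock: "clk_edn_i", reset: "rst_edn_ni" }
    #   ]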

    def other_clocks(self) -> List[str]:
        ret = []
        for item in self.items:
            if not item.primary and item.clock is not None:
                ret.append(item.clock)
        return ret

    def clock_signals(self) -> List[str]:
        return [item.clock for item in self.items if item.clock is not None]

    def reset_signals(self) -> List[str]:
        return [item.reset for item in self.items if item.reset is not None]

@@ -0,0 +1,35 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

from typing import Dict

from .lib import check_keys, check_str, check_int

REQUIRED_FIELDS = {
    'name': ['s', "name of the member of the enum"],
    'desc': ['t', "description when field has this value"],
    'value': ['d', "value of this member of the enum"]
}
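
# Example raw entry (illustrative; names made up):
#   { value: "0", name: "idle", desc: "FSM is in the idle state" }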


class EnumEntry:
    def __init__(self, where: str, max_val: int, raw: object):
        rd = check_keys(raw, where,
                        list(REQUIRED_FIELDS.keys()),
                        [])

        self.name = check_str(rd['name'], 'name field of {}'.format(where))
        self.desc = check_str(rd['desc'], 'desc field of {}'.format(where))
        self.value = check_int(rd['value'], 'value field of {}'.format(where))
        if not (0 <= self.value <= max_val):
            raise ValueError("value for {} is {}, which isn't representable "
                             "in the field (representable range: 0 .. {})."
                             .format(where, self.value, max_val))

    def _asdict(self) -> Dict[str, object]:
        return {
            'name': self.name,
            'desc': self.desc,
            'value': str(self.value)
        }

@@ -0,0 +1,283 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

from typing import Dict, List, Optional

from .access import SWAccess, HWAccess
from .bits import Bits
from .enum_entry import EnumEntry
from .lib import (check_keys, check_str, check_name,
                  check_list, check_str_list, check_xint)
from .params import ReggenParams

REQUIRED_FIELDS = {
    'bits': ['b', "bit or bit range (msb:lsb)"]
}

OPTIONAL_FIELDS = {
    'name': ['s', "name of the field"],
    'desc': ['t', "description of field (required if the field has a name)"],
    'swaccess': [
        's', "software access permission, copied from "
        "register if not provided in field. "
        "(Tool adds if not provided.)"
    ],
    'hwaccess': [
        's', "hardware access permission, copied from "
        "register if not provided in field. "
        "(Tool adds if not provided.)"
    ],
    'resval': [
        'x', "reset value, comes from register resval "
        "if not provided in field. Zero if neither "
        "are provided and the field is readable, "
        "x if neither are provided and the field "
        "is wo. Must match if both are provided."
    ],
    'enum': ['l', "list of permitted enumeration groups"],
    'tags': [
        's',
        "tags for the field, following the format 'tag_name:item1:item2...'"
    ]
}


class Field:
    def __init__(self,
                 name: str,
                 desc: Optional[str],
                 tags: List[str],
                 swaccess: SWAccess,
                 hwaccess: HWAccess,
                 bits: Bits,
                 resval: Optional[int],
                 enum: Optional[List[EnumEntry]]):
        self.name = name
        self.desc = desc
        self.tags = tags
        self.swaccess = swaccess
        self.hwaccess = hwaccess
        self.bits = bits
        self.resval = resval
        self.enum = enum

    @staticmethod
    def from_raw(reg_name: str,
                 field_idx: int,
                 num_fields: int,
                 default_swaccess: SWAccess,
                 default_hwaccess: HWAccess,
                 reg_resval: Optional[int],
                 reg_width: int,
                 params: ReggenParams,
                 raw: object) -> 'Field':
        where = 'field {} of {} register'.format(field_idx, reg_name)
        rd = check_keys(raw, where,
                        list(REQUIRED_FIELDS.keys()),
                        list(OPTIONAL_FIELDS.keys()))

        raw_name = rd.get('name')
        if raw_name is None:
            name = ('field{}'.format(field_idx + 1)
                    if num_fields > 1 else reg_name)
        else:
            name = check_name(raw_name, 'name of {}'.format(where))

        raw_desc = rd.get('desc')
        if raw_desc is None and raw_name is not None:
            raise ValueError('Missing desc field for {}'
                             .format(where))
        if raw_desc is None:
            desc = None
        else:
            desc = check_str(raw_desc, 'desc field for {}'.format(where))

        tags = check_str_list(rd.get('tags', []),
                              'tags for {}'.format(where))

        raw_swaccess = rd.get('swaccess')
        if raw_swaccess is not None:
            swaccess = SWAccess(where, raw_swaccess)
        else:
            swaccess = default_swaccess

        raw_hwaccess = rd.get('hwaccess')
        if raw_hwaccess is not None:
            hwaccess = HWAccess(where, raw_hwaccess)
        else:
            hwaccess = default_hwaccess

        bits = Bits.from_raw(where, reg_width, params, rd['bits'])

        raw_resval = rd.get('resval')
        if raw_resval is None:
            # The field doesn't define a reset value. Use bits from reg_resval
            # if it's defined, otherwise None (which means "x").
            if reg_resval is None:
                resval = None
            else:
                resval = bits.extract_field(reg_resval)
        else:
            # The field does define a reset value. It should be an integer or
            # 'x'. In the latter case, we set resval to None (as above).
            resval = check_xint(raw_resval, 'resval field for {}'.format(where))
            if resval is None:
                # We don't allow a field to be explicitly 'x' on reset but for
                # the containing register to have a reset value.
                if reg_resval is not None:
                    raise ValueError('resval field for {} is "x", but the '
                                     'register defines a resval as well.'
                                     .format(where))
            else:
                # Check that the reset value is representable with bits
                if not (0 <= resval <= bits.max_value()):
                    raise ValueError("resval field for {} is {}, which "
                                     "isn't representable as an unsigned "
                                     "{}-bit integer."
                                     .format(where, resval, bits.width()))

                # If the register had a resval, check this value matches it.
                if reg_resval is not None:
                    resval_from_reg = bits.extract_field(reg_resval)
                    if resval != resval_from_reg:
                        raise ValueError('resval field for {} is {}, but the '
                                         'register defines a resval as well, '
                                         'where bits {}:{} would give {}.'
                                         .format(where, resval,
                                                 bits.msb, bits.lsb,
                                                 resval_from_reg))

        raw_enum = rd.get('enum')
        if raw_enum is None:
            enum = None
        else:
            enum = []
            raw_entries = check_list(raw_enum,
                                     'enum field for {}'.format(where))
            enum_val_to_name = {}  # type: Dict[int, str]
            for idx, raw_entry in enumerate(raw_entries):
                entry = EnumEntry('entry {} in enum list for {}'
                                  .format(idx + 1, where),
                                  bits.max_value(),
                                  raw_entry)
                if entry.value in enum_val_to_name:
                    raise ValueError('In {}, duplicate enum entries for '
                                     'value {} ({} and {}).'
                                     .format(where,
                                             entry.value,
                                             enum_val_to_name[entry.value],
                                             entry.name))
                enum.append(entry)
                enum_val_to_name[entry.value] = entry.name

        return Field(name, desc, tags, swaccess, hwaccess, bits, resval, enum)

    def has_incomplete_enum(self) -> bool:
        return (self.enum is not None and
                len(self.enum) != 1 + self.bits.max_value())

    def get_n_bits(self, hwext: bool, hwqe: bool, hwre: bool, bittype: List[str]) -> int:
        '''Get the size of this field in bits

        bittype should be a list of the types of signals to count. The
        elements should come from the following list:

        - 'q': A signal for the value of the field. Only needed if HW can read
          its contents.

        - 'd': A signal for the next value of the field. Only needed if HW can
          write its contents.

        - 'qe': A qualified write-enable signal, asserted when software writes
          the field. Only counted if HW can read the field's contents and the
          hwqe flag is set.

        - 're': A read-enable signal, asserted when software reads the field.
          Only counted if HW can read the field's contents and the hwre flag
          is set.

        - 'de': A write enable signal for hardware accesses. Only needed if HW
          can write the field's contents and the register data is stored in the
          register block (true if the hwext flag is false).

        '''
        n_bits = 0
        if "q" in bittype and self.hwaccess.allows_read():
            n_bits += self.bits.width()
        if "d" in bittype and self.hwaccess.allows_write():
            n_bits += self.bits.width()
        if "qe" in bittype and self.hwaccess.allows_read():
            n_bits += int(hwqe)
        if "re" in bittype and self.hwaccess.allows_read():
            n_bits += int(hwre)
        if "de" in bittype and self.hwaccess.allows_write():
            n_bits += int(not hwext)
        return n_bits
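
    # Counting example (illustrative): a field occupying bits [5:2] (width 4)
    # with hwaccess 'hrw' on a non-hwext register, with hwqe=True and
    # hwre=False, gives get_n_bits(bittype=['q', 'd', 'qe', 'de']) ==
    # 4 + 4 + 1 + 1 == 10.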

    def make_multi(self,
                   reg_width: int,
                   min_reg_idx: int,
                   max_reg_idx: int,
                   cname: str,
                   creg_idx: int,
                   stripped: bool) -> List['Field']:
        assert 0 <= min_reg_idx <= max_reg_idx

        # Check that we won't overflow reg_width. We assume that the LSB should
        # be preserved: if msb=5, lsb=2 then the replicated copies will be
        # [5:2], [11:8] etc.
        num_copies = 1 + max_reg_idx - min_reg_idx
        field_width = self.bits.msb + 1

        if field_width * num_copies > reg_width:
            raise ValueError('Cannot replicate field {} {} times: the '
                             'resulting width would be {}, but the register '
                             'width is just {}.'
                             .format(self.name, num_copies,
                                     field_width * num_copies, reg_width))

        desc = ('For {}{}'.format(cname, creg_idx)
                if stripped else self.desc)
        enum = None if stripped else self.enum

        ret = []
        for reg_idx in range(min_reg_idx, max_reg_idx + 1):
            name = '{}_{}'.format(self.name, reg_idx)

            bit_offset = field_width * (reg_idx - min_reg_idx)
            bits = (self.bits
                    if bit_offset == 0
                    else self.bits.make_translated(bit_offset))

            ret.append(Field(name, desc,
                             self.tags, self.swaccess, self.hwaccess,
                             bits, self.resval, enum))

        return ret

    def make_suffixed(self, suffix: str,
                      cname: str,
                      creg_idx: int,
                      stripped: bool) -> 'Field':
        desc = ('For {}{}'.format(cname, creg_idx)
                if stripped else self.desc)
        enum = None if stripped else self.enum

        return Field(self.name + suffix,
                     desc, self.tags, self.swaccess, self.hwaccess,
                     self.bits, self.resval, enum)

    def _asdict(self) -> Dict[str, object]:
        rd = {
            'bits': self.bits.as_str(),
            'name': self.name,
            'swaccess': self.swaccess.key,
            'hwaccess': self.hwaccess.key,
            'resval': 'x' if self.resval is None else str(self.resval),
            'tags': self.tags
        }  # type: Dict[str, object]

        if self.desc is not None:
            rd['desc'] = self.desc
        if self.enum is not None:
            rd['enum'] = self.enum
        return rd

    def sw_readable(self) -> bool:
        return self.swaccess.key not in ['wo', 'r0w1c']

    def sw_writable(self) -> bool:
        return self.swaccess.key != 'ro'

@@ -0,0 +1,172 @@
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0

// FPV CSR read and write assertions auto-generated by `reggen` containing data structure
// Do Not Edit directly
// TODO: This automation currently only supports registers without HW write access
<%
from reggen import (gen_fpv)
from reggen.register import Register

from topgen import lib

lblock = block.name.lower()

# This template shouldn't be instantiated if the device interface
# doesn't actually have any registers.
assert rb.flat_regs

%>\
<%def name="construct_classes(block)">\

`include "prim_assert.sv"
`ifdef UVM
  import uvm_pkg::*;
`endif

// Block: ${lblock}
module ${mod_base}_csr_assert_fpv import tlul_pkg::*;
  import top_pkg::*;(
  input clk_i,
  input rst_ni,

  // TileLink ports
  input tl_h2d_t h2d,
  input tl_d2h_t d2h
);
<%
addr_width = rb.get_addr_width()
addr_msb = addr_width - 1
hro_regs_list = [r for r in rb.flat_regs if not r.is_hw_writable()]
num_hro_regs = len(hro_regs_list)
hro_map = {r.offset: (idx, r) for idx, r in enumerate(hro_regs_list)}
%>\

// Currently the FPV CSR assertions only support HRO registers.
% if num_hro_regs > 0:
`ifndef VERILATOR
`ifndef SYNTHESIS

  parameter bit[3:0] MAX_A_SOURCE = 10; // used for FPV only to reduce runtime

  typedef struct packed {
    logic [TL_DW-1:0] wr_data;
    logic [TL_AW-1:0] addr;
    logic             wr_pending;
    logic             rd_pending;
  } pend_item_t;

  bit disable_sva;

  // mask register to convert byte to bit
  logic [TL_DW-1:0] a_mask_bit;

  assign a_mask_bit[7:0]   = h2d.a_mask[0] ? '1 : '0;
  assign a_mask_bit[15:8]  = h2d.a_mask[1] ? '1 : '0;
  assign a_mask_bit[23:16] = h2d.a_mask[2] ? '1 : '0;
  assign a_mask_bit[31:24] = h2d.a_mask[3] ? '1 : '0;

  bit [${addr_msb}-2:0] hro_idx; // index for exp_vals
  bit [${addr_msb}:0] normalized_addr;

  // Map register address with hro_idx in exp_vals array.
  always_comb begin: decode_hro_addr_to_idx
    unique case (pend_trans[d2h.d_source].addr)
    % for idx, r in hro_map.values():
      ${r.offset}: hro_idx <= ${idx};
    % endfor
      // If the register is not a HRO register, the write data will all update to this default idx.
      default: hro_idx <= ${num_hro_regs};
    endcase
  end

  // store internal expected values for HW ReadOnly registers
  logic [TL_DW-1:0] exp_vals[${num_hro_regs + 1}];

  `ifdef FPV_ON
    pend_item_t [MAX_A_SOURCE:0] pend_trans;
  `else
    pend_item_t [2**TL_AIW-1:0] pend_trans;
  `endif

  // the normalized address only takes bits [${addr_msb}:2] of the TLUL a_address
  assign normalized_addr = {h2d.a_address[${addr_msb}:2], 2'b0};

  % if num_hro_regs > 0:
    // for write HRO registers, store the write data into exp_vals
    always_ff @(negedge clk_i or negedge rst_ni) begin
      if (!rst_ni) begin
        pend_trans <= '0;
      % for hro_reg in hro_regs_list:
        exp_vals[${hro_map.get(hro_reg.offset)[0]}] <= ${hro_reg.resval};
      % endfor
      end else begin
        if (h2d.a_valid && d2h.a_ready) begin
          pend_trans[h2d.a_source].addr <= normalized_addr;
          if (h2d.a_opcode inside {PutFullData, PutPartialData}) begin
            pend_trans[h2d.a_source].wr_data <= h2d.a_data & a_mask_bit;
            pend_trans[h2d.a_source].wr_pending <= 1'b1;
          end else if (h2d.a_opcode == Get) begin
            pend_trans[h2d.a_source].rd_pending <= 1'b1;
          end
        end
        if (d2h.d_valid) begin
          if (pend_trans[d2h.d_source].wr_pending == 1) begin
            if (!d2h.d_error) begin
              exp_vals[hro_idx] <= pend_trans[d2h.d_source].wr_data;
            end
            pend_trans[d2h.d_source].wr_pending <= 1'b0;
          end
          if (h2d.d_ready && pend_trans[d2h.d_source].rd_pending == 1) begin
            pend_trans[d2h.d_source].rd_pending <= 1'b0;
          end
        end
      end
    end

    // for read HRO registers, assert read out values by access policy and exp_vals
    % for hro_reg in hro_regs_list:
<%
      r_name = hro_reg.name.lower()
      reg_addr = hro_reg.offset
      reg_addr_hex = format(reg_addr, 'x')
      regwen = hro_reg.regwen
      reg_mask = 0

      for f in hro_reg.get_field_list():
        f_access = f.swaccess.key.lower()
        if f_access == "rw" and regwen == None:
          reg_mask = reg_mask | f.bits.bitmask()
%>\
      % if reg_mask != 0:
<%      reg_mask_hex = format(reg_mask, 'x') %>\
    `ASSERT(${r_name}_rd_A, d2h.d_valid && pend_trans[d2h.d_source].rd_pending &&
            pend_trans[d2h.d_source].addr == ${addr_width}'h${reg_addr_hex} |->
            d2h.d_error ||
            (d2h.d_data & 'h${reg_mask_hex}) == (exp_vals[${hro_map.get(reg_addr)[0]}] & 'h${reg_mask_hex}))

      % endif
    % endfor
  % endif

  // This FPV-only assumption is to reduce the FPV runtime.
  `ASSUME_FPV(TlulSource_M, h2d.a_source >= 0 && h2d.a_source <= MAX_A_SOURCE, clk_i, !rst_ni)

  `ifdef UVM
    initial forever begin
      bit csr_assert_en;
      uvm_config_db#(bit)::wait_modified(null, "%m", "csr_assert_en");
      if (!uvm_config_db#(bit)::get(null, "%m", "csr_assert_en", csr_assert_en)) begin
        `uvm_fatal("csr_assert", "Can't find csr_assert_en")
      end
      disable_sva = !csr_assert_en;
    end
  `endif

`endif
`endif
% endif
endmodule
</%def>\
${construct_classes(block)}

@@ -0,0 +1,114 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""
Generate HTML documentation from Block
"""

from typing import TextIO

from .ip_block import IpBlock
from .html_helpers import render_td
from .signal import Signal


def genout(outfile: TextIO, msg: str) -> None:
    outfile.write(msg)


def name_width(x: Signal) -> str:
    if x.bits.width() == 1:
        return x.name

    return '{}[{}:0]'.format(x.name, x.bits.msb)
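
# For example (illustrative), a 1-bit signal "tx" renders as "tx" and a
# 4-bit signal "sel" with bits [3:0] renders as "sel[3:0]".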


def gen_kv(outfile: TextIO, key: str, value: str) -> None:
    genout(outfile,
           '<p><i>{}:</i> {}</p>\n'.format(key, value))


def gen_cfg_html(cfgs: IpBlock, outfile: TextIO) -> None:
    rnames = cfgs.get_rnames()

    ot_server = 'https://docs.opentitan.org'
    comport_url = ot_server + '/doc/rm/comportability_specification'
    genout(outfile,
           '<p>Referring to the <a href="{url}">Comportable guideline for '
           'peripheral device functionality</a>, the module '
           '<b><code>{mod_name}</code></b> has the following hardware '
           'interfaces defined.</p>\n'
           .format(url=comport_url, mod_name=cfgs.name))

    # clocks
    gen_kv(outfile,
           'Primary Clock',
           '<b><code>{}</code></b>'.format(cfgs.clocking.primary.clock))
    other_clocks = cfgs.clocking.other_clocks()
    if other_clocks:
        other_clocks_str = ['<b><code>{}</code></b>'.format(clk)
                            for clk in other_clocks]
        gen_kv(outfile, 'Other Clocks', ', '.join(other_clocks_str))
    else:
        gen_kv(outfile, 'Other Clocks', '<i>none</i>')

    # bus interfaces
    dev_ports = ['<b><code>{}</code></b>'.format(port)
                 for port in cfgs.bus_interfaces.get_port_names(False, True)]
    assert dev_ports
    gen_kv(outfile, 'Bus Device Interfaces (TL-UL)', ', '.join(dev_ports))

    host_ports = ['<b><code>{}</code></b>'.format(port)
                  for port in cfgs.bus_interfaces.get_port_names(True, False)]
    if host_ports:
        gen_kv(outfile, 'Bus Host Interfaces (TL-UL)', ', '.join(host_ports))
    else:
        gen_kv(outfile, 'Bus Host Interfaces (TL-UL)', '<i>none</i>')

    # IO
    ios = ([('input', x) for x in cfgs.xputs[1]] +
           [('output', x) for x in cfgs.xputs[2]] +
           [('inout', x) for x in cfgs.xputs[0]])
    if ios:
        genout(outfile, "<p><i>Peripheral Pins for Chip IO:</i></p>\n")
        genout(
            outfile, "<table class=\"cfgtable\"><tr>" +
            "<th>Pin name</th><th>direction</th>" +
            "<th>Description</th></tr>\n")
        for direction, x in ios:
            genout(outfile,
                   '<tr><td>{}</td><td>{}</td>{}</tr>'
                   .format(name_width(x),
                           direction,
                           render_td(x.desc, rnames, None)))
        genout(outfile, "</table>\n")
    else:
        genout(outfile, "<p><i>Peripheral Pins for Chip IO: none</i></p>\n")

    if not cfgs.interrupts:
        genout(outfile, "<p><i>Interrupts: none</i></p>\n")
    else:
        genout(outfile, "<p><i>Interrupts:</i></p>\n")
        genout(
            outfile, "<table class=\"cfgtable\"><tr><th>Interrupt Name</th>" +
            "<th>Description</th></tr>\n")
        for x in cfgs.interrupts:
            genout(outfile,
                   '<tr><td>{}</td>{}</tr>'
                   .format(name_width(x),
                           render_td(x.desc, rnames, None)))
        genout(outfile, "</table>\n")

    if not cfgs.alerts:
        genout(outfile, "<p><i>Security Alerts: none</i></p>\n")
    else:
        genout(outfile, "<p><i>Security Alerts:</i></p>\n")
        genout(
            outfile, "<table class=\"cfgtable\"><tr><th>Alert Name</th>" +
            "<th>Description</th></tr>\n")
        for x in cfgs.alerts:
            genout(outfile,
                   '<tr><td>{}</td>{}</tr>'
                   .format(x.name,
                           render_td(x.desc, rnames, None)))
        genout(outfile, "</table>\n")

@@ -0,0 +1,445 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""
Generate C header from validated register JSON tree
"""

import io
import logging as log
import sys
import textwrap
import warnings
from typing import List, Optional, Set, TextIO


from .field import Field
from .ip_block import IpBlock
from .params import LocalParam
from .register import Register
from .multi_register import MultiRegister
from .signal import Signal
from .window import Window


def genout(outfile: TextIO, msg: str) -> None:
    outfile.write(msg)


def to_snake_case(s: str) -> str:
    val = []
    for i, ch in enumerate(s):
        if i > 0 and ch.isupper():
            val.append('_')
        val.append(ch)
    return ''.join(val)


def as_define(s: str) -> str:
    s = s.upper()
    r = ''
    for i in range(0, len(s)):
        r += s[i] if s[i].isalnum() else '_'
    return r
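
# For example (illustrative), to_snake_case('NumAlerts') gives 'Num_Alerts'
# and as_define('uart.CTRL') gives 'UART_CTRL'.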


def first_line(s: str) -> str:
    """Returns the first line of a multi-line string"""
    return s.splitlines()[0]


def format_comment(s: str) -> str:
    """Formats a string to comment wrapped to an 80 character line width

    Returns wrapped string including newline and // comment characters.
    """
    return '\n'.join(
        textwrap.wrap(
            s, width=77, initial_indent='// ', subsequent_indent='// ')) + '\n'


def gen_define(name: str,
               args: List[str],
               body: str,
               existing_defines: Set[str],
               indent: str = ' ') -> str:
    r"""Produces a #define string. The result is split onto two lines if a
    single line would be wider than 80 characters, and it includes a trailing
    newline.

    Arguments:
    name - Name of the #define
    args - List of arguments for the define, provide an empty list if there are
        none
    body - Body of the #define
    existing_defines - set of already generated define names.
        Error if `name` is in `existing_defines`.
    indent - Gives string to prepend on any new lines produced by
        wrapping (default ' ')

    Example result:
    name = 'A_MACRO'
    args = ['arg1', 'arg2'],
    body = 'arg1 + arg2 + 10'

    #define A_MACRO(arg1, arg2) arg1 + arg2 + 10

    When the macro is wrapped, the break happens after the argument list (or
    after the macro name if there is no argument list):

    #define A_MACRO(arg1, arg2) \
      arg1 + arg2 + 10

    """

    if name in existing_defines:
        log.error("Duplicate #define for " + name)
        sys.exit(1)

    if len(args) != 0:
        define_declare = '#define ' + name + '(' + ', '.join(args) + ')'
    else:
        define_declare = '#define ' + name

    oneline_define = define_declare + ' ' + body

    existing_defines.add(name)

    if len(oneline_define) <= 80:
        return oneline_define + '\n'

    return define_declare + ' \\\n' + indent + body + '\n'


def gen_cdefine_register(outstr: TextIO,
                         reg: Register,
                         comp: str,
                         width: int,
                         rnames: Set[str],
                         existing_defines: Set[str]) -> None:
    rname = reg.name
    offset = reg.offset

    genout(outstr, format_comment(first_line(reg.desc)))
    defname = as_define(comp + '_' + rname)
    genout(
        outstr,
        gen_define(defname + '_REG_OFFSET', [], hex(offset), existing_defines))
    genout(
        outstr,
        gen_define(defname + '_REG_RESVAL', [],
                   hex(reg.resval), existing_defines))

    for field in reg.fields:
        dname = defname + '_' + as_define(field.name)
        field_width = field.bits.width()

        if field_width == 1:
            # single bit
            genout(
                outstr,
                gen_define(dname + '_BIT', [], str(field.bits.lsb),
                           existing_defines))
        else:
            # multiple bits (unless it is the whole register)
            if field_width != width:
                mask = field.bits.bitmask() >> field.bits.lsb
                genout(
                    outstr,
                    gen_define(dname + '_MASK', [], hex(mask),
                               existing_defines))
                genout(
                    outstr,
                    gen_define(dname + '_OFFSET', [], str(field.bits.lsb),
                               existing_defines))
                genout(
                    outstr,
                    gen_define(
                        dname + '_FIELD', [],
                        '((bitfield_field32_t) {{ .mask = {dname}_MASK, .index = {dname}_OFFSET }})'
                        .format(dname=dname), existing_defines))
        if field.enum is not None:
            for enum in field.enum:
                ename = as_define(enum.name)
                value = hex(enum.value)
                genout(
                    outstr,
                    gen_define(
                        defname + '_' + as_define(field.name) +
                        '_VALUE_' + ename, [], value, existing_defines))
    genout(outstr, '\n')
    return
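
# For a register CTRL at offset 0x10 in component "uart" (an illustrative
# example, not taken from a real block), the calls above would emit lines
# like:
#
#   #define UART_CTRL_REG_OFFSET 0x10
#   #define UART_CTRL_REG_RESVAL 0x0
#   #define UART_CTRL_TX_BIT 0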


def gen_cdefine_window(outstr: TextIO,
                       win: Window,
                       comp: str,
                       regwidth: int,
                       rnames: Set[str],
                       existing_defines: Set[str]) -> None:
    offset = win.offset

    genout(outstr, format_comment('Memory area: ' + first_line(win.desc)))
    defname = as_define(comp + '_' + win.name)
    genout(
        outstr,
        gen_define(defname + '_REG_OFFSET', [], hex(offset), existing_defines))
    items = win.items
    genout(
        outstr,
        gen_define(defname + '_SIZE_WORDS', [], str(items), existing_defines))
    items = items * (regwidth // 8)
    genout(
        outstr,
        gen_define(defname + '_SIZE_BYTES', [], str(items), existing_defines))

    wid = win.validbits
    if (wid != regwidth):
        mask = (1 << wid) - 1
        genout(outstr,
               gen_define(defname + '_MASK ', [], hex(mask), existing_defines))


def gen_cdefines_module_param(outstr: TextIO,
                              param: LocalParam,
                              module_name: str,
                              existing_defines: Set[str]) -> None:
    # Presently there is only one type (int); however, if new types are added,
    # they may need to be handled differently.
    known_types = ["int"]
    if param.param_type not in known_types:
        warnings.warn("Cannot generate a module define of type {}"
                      .format(param.param_type))
        return

    if param.desc is not None:
        genout(outstr, format_comment(first_line(param.desc)))
    # Heuristic: if the name already has underscores, it's already snake_case,
    # otherwise, assume StudlyCaps and convert it to snake_case.
    param_name = param.name if '_' in param.name else to_snake_case(param.name)
    define_name = as_define(module_name + '_PARAM_' + param_name)
    if param.param_type == "int":
        define = gen_define(define_name, [], param.value,
                            existing_defines)

    genout(outstr, define)
    genout(outstr, '\n')


def gen_cdefines_module_params(outstr: TextIO,
                               module_data: IpBlock,
                               module_name: str,
                               register_width: int,
                               existing_defines: Set[str]) -> None:
    module_params = module_data.params

    for param in module_params.get_localparams():
        gen_cdefines_module_param(outstr, param, module_name, existing_defines)

    genout(outstr, format_comment(first_line("Register width")))
    define_name = as_define(module_name + '_PARAM_REG_WIDTH')
    define = gen_define(define_name, [], str(register_width), existing_defines)
    genout(outstr, define)
    genout(outstr, '\n')


def gen_multireg_field_defines(outstr: TextIO,
                               regname: str,
                               field: Field,
                               subreg_num: int,
                               regwidth: int,
                               existing_defines: Set[str]) -> None:
    field_width = field.bits.width()
    fields_per_reg = regwidth // field_width

    define_name = regname + '_' + as_define(field.name + "_FIELD_WIDTH")
    define = gen_define(define_name, [], str(field_width), existing_defines)
    genout(outstr, define)

    define_name = regname + '_' + as_define(field.name + "_FIELDS_PER_REG")
    define = gen_define(define_name, [], str(fields_per_reg), existing_defines)
    genout(outstr, define)

    define_name = regname + "_MULTIREG_COUNT"
    define = gen_define(define_name, [], str(subreg_num), existing_defines)
    genout(outstr, define)

    genout(outstr, '\n')


def gen_cdefine_multireg(outstr: TextIO,
                         multireg: MultiRegister,
                         component: str,
                         regwidth: int,
                         rnames: Set[str],
                         existing_defines: Set[str]) -> None:
    comment = multireg.reg.desc + " (common parameters)"
    genout(outstr, format_comment(first_line(comment)))
    if len(multireg.reg.fields) == 1:
        regname = as_define(component + '_' + multireg.reg.name)
        gen_multireg_field_defines(outstr, regname, multireg.reg.fields[0],
                                   len(multireg.regs), regwidth, existing_defines)
    else:
        log.warn("Non-homogeneous multireg " + multireg.reg.name +
                 ": skipping multireg-specific data generation.")

    for subreg in multireg.regs:
        gen_cdefine_register(outstr, subreg, component, regwidth, rnames,
                             existing_defines)
|
|
||||||
|
def gen_cdefines_interrupt_field(outstr: TextIO,
|
||||||
|
interrupt: Signal,
|
||||||
|
component: str,
|
||||||
|
regwidth: int,
|
||||||
|
existing_defines: Set[str]) -> None:
|
||||||
|
fieldlsb = interrupt.bits.lsb
|
||||||
|
iname = interrupt.name
|
||||||
|
defname = as_define(component + '_INTR_COMMON_' + iname)
|
||||||
|
|
||||||
|
if interrupt.bits.width() == 1:
|
||||||
|
# single bit
|
||||||
|
genout(
|
||||||
|
outstr,
|
||||||
|
gen_define(defname + '_BIT', [], str(fieldlsb), existing_defines))
|
||||||
|
else:
|
||||||
|
# multiple bits (unless it is the whole register)
|
||||||
|
if interrupt.bits.width() != regwidth:
|
||||||
|
mask = interrupt.bits.msb >> fieldlsb
|
||||||
|
genout(
|
||||||
|
outstr,
|
||||||
|
gen_define(defname + '_MASK', [], hex(mask), existing_defines))
|
||||||
|
genout(
|
||||||
|
outstr,
|
||||||
|
gen_define(defname + '_OFFSET', [], str(fieldlsb),
|
||||||
|
existing_defines))
|
||||||
|
genout(
|
||||||
|
outstr,
|
||||||
|
gen_define(
|
||||||
|
defname + '_FIELD', [],
|
||||||
|
'((bitfield_field32_t) {{ .mask = {dname}_MASK, .index = {dname}_OFFSET }})'
|
||||||
|
.format(dname=defname), existing_defines))
|
||||||
|
|
||||||
|
|
||||||
|
def gen_cdefines_interrupts(outstr: TextIO,
|
||||||
|
block: IpBlock,
|
||||||
|
component: str,
|
||||||
|
regwidth: int,
|
||||||
|
existing_defines: Set[str]) -> None:
|
||||||
|
# If no_auto_intr_regs is true, then we do not generate common defines,
|
||||||
|
# because the bit offsets for a particular interrupt may differ between
|
||||||
|
# the interrupt enable/state/test registers.
|
||||||
|
if block.no_auto_intr:
|
||||||
|
return
|
||||||
|
|
||||||
|
genout(outstr, format_comment(first_line("Common Interrupt Offsets")))
|
||||||
|
for intr in block.interrupts:
|
||||||
|
gen_cdefines_interrupt_field(outstr, intr, component, regwidth,
|
||||||
|
existing_defines)
|
||||||
|
genout(outstr, '\n')
|
||||||
|
|
||||||
|
|
||||||
|
def gen_cdefines(block: IpBlock,
|
||||||
|
outfile: TextIO,
|
||||||
|
src_lic: Optional[str],
|
||||||
|
src_copy: str) -> int:
|
||||||
|
rnames = block.get_rnames()
|
||||||
|
|
||||||
|
outstr = io.StringIO()
|
||||||
|
|
||||||
|
# This tracks the defines that have been generated so far, so we
|
||||||
|
# can error if we attempt to duplicate a definition
|
||||||
|
existing_defines = set() # type: Set[str]
|
||||||
|
|
||||||
|
gen_cdefines_module_params(outstr, block, block.name, block.regwidth,
|
||||||
|
existing_defines)
|
||||||
|
|
||||||
|
gen_cdefines_interrupts(outstr, block, block.name, block.regwidth,
|
||||||
|
existing_defines)
|
||||||
|
|
||||||
|
for rb in block.reg_blocks.values():
|
||||||
|
for x in rb.entries:
|
||||||
|
if isinstance(x, Register):
|
||||||
|
gen_cdefine_register(outstr, x, block.name, block.regwidth, rnames,
|
||||||
|
existing_defines)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if isinstance(x, MultiRegister):
|
||||||
|
gen_cdefine_multireg(outstr, x, block.name, block.regwidth, rnames,
|
||||||
|
existing_defines)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if isinstance(x, Window):
|
||||||
|
gen_cdefine_window(outstr, x, block.name, block.regwidth,
|
||||||
|
rnames, existing_defines)
|
||||||
|
continue
|
||||||
|
|
||||||
|
generated = outstr.getvalue()
|
||||||
|
outstr.close()
|
||||||
|
|
||||||
|
genout(outfile, '// Generated register defines for ' + block.name + '\n\n')
|
||||||
|
if src_copy != '':
|
||||||
|
genout(outfile, '// Copyright information found in source file:\n')
|
||||||
|
genout(outfile, '// ' + src_copy + '\n\n')
|
||||||
|
if src_lic is not None:
|
||||||
|
genout(outfile, '// Licensing information found in source file:\n')
|
||||||
|
for line in src_lic.splitlines():
|
||||||
|
genout(outfile, '// ' + line + '\n')
|
||||||
|
genout(outfile, '\n')
|
||||||
|
|
||||||
|
# Header Include Guard
|
||||||
|
genout(outfile, '#ifndef _' + as_define(block.name) + '_REG_DEFS_\n')
|
||||||
|
genout(outfile, '#define _' + as_define(block.name) + '_REG_DEFS_\n\n')
|
||||||
|
|
||||||
|
# Header Extern Guard (so header can be used from C and C++)
|
||||||
|
genout(outfile, '#ifdef __cplusplus\n')
|
||||||
|
genout(outfile, 'extern "C" {\n')
|
||||||
|
genout(outfile, '#endif\n')
|
||||||
|
|
||||||
|
genout(outfile, generated)
|
||||||
|
|
||||||
|
# Header Extern Guard
|
||||||
|
genout(outfile, '#ifdef __cplusplus\n')
|
||||||
|
genout(outfile, '} // extern "C"\n')
|
||||||
|
genout(outfile, '#endif\n')
|
||||||
|
|
||||||
|
# Header Include Guard
|
||||||
|
genout(outfile, '#endif // _' + as_define(block.name) + '_REG_DEFS_\n')
|
||||||
|
|
||||||
|
genout(outfile, '// End generated register defines for ' + block.name)
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_gen_define() -> None:
|
||||||
|
basic_oneline = '#define MACRO_NAME body\n'
|
||||||
|
assert gen_define('MACRO_NAME', [], 'body', set()) == basic_oneline
|
||||||
|
|
||||||
|
basic_oneline_with_args = '#define MACRO_NAME(arg1, arg2) arg1 + arg2\n'
|
||||||
|
assert (gen_define('MACRO_NAME', ['arg1', 'arg2'], 'arg1 + arg2',
|
||||||
|
set()) == basic_oneline_with_args)
|
||||||
|
|
||||||
|
long_macro_name = 'A_VERY_VERY_VERY_VERY_VERY_VERY_VERY_VERY_VERY_VERY_VERY_LONG_MACRO_NAME'
|
||||||
|
|
||||||
|
multiline = ('#define ' + long_macro_name + ' \\\n' +
|
||||||
|
' a_fairly_long_body + something_else + 10\n')
|
||||||
|
|
||||||
|
assert (gen_define(long_macro_name, [],
|
||||||
|
'a_fairly_long_body + something_else + 10',
|
||||||
|
set()) == multiline)
|
||||||
|
|
||||||
|
multiline_with_args = ('#define ' + long_macro_name +
|
||||||
|
'(arg1, arg2, arg3) \\\n' +
|
||||||
|
' a_fairly_long_body + arg1 + arg2 + arg3\n')
|
||||||
|
|
||||||
|
assert (gen_define(long_macro_name, ['arg1', 'arg2', 'arg3'],
|
||||||
|
'a_fairly_long_body + arg1 + arg2 + arg3',
|
||||||
|
set()) == multiline_with_args)
|
||||||
|
|
||||||
|
multiline_with_args_big_indent = (
|
||||||
|
'#define ' + long_macro_name + '(arg1, arg2, arg3) \\\n' +
|
||||||
|
' a_fairly_long_body + arg1 + arg2 + arg3\n')
|
||||||
|
|
||||||
|
assert (gen_define(long_macro_name, ['arg1', 'arg2', 'arg3'],
|
||||||
|
'a_fairly_long_body + arg1 + arg2 + arg3',
|
||||||
|
set(),
|
||||||
|
indent=' ') == multiline_with_args_big_indent)
|
|
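As a quick illustration of what the helpers above emit, here is a minimal sketch that drives `gen_define` by hand with the same call pattern `gen_cdefine_register` uses for a single-bit field. The register and field names are made up, and the import path is an assumption (this hunk does not show the file name of the module that defines `gen_define`).

```python
# Minimal sketch, assuming the helpers above are importable as
# reggen.gen_cheader and that UART_CTRL / its TX bit are hypothetical names.
import io

from reggen.gen_cheader import gen_define

outstr = io.StringIO()
existing = set()

# Register offset define, then the single-bit field define.
outstr.write(gen_define('UART_CTRL_REG_OFFSET', [], hex(0x0), existing))
outstr.write(gen_define('UART_CTRL_TX_BIT', [], str(0), existing))

print(outstr.getvalue())
# Expected shape of the output:
#   #define UART_CTRL_REG_OFFSET 0x0
#   #define UART_CTRL_TX_BIT 0
```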
@@ -0,0 +1,109 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
'''Generate DV code for an IP block'''

import logging as log
import os
from typing import List, Union

import yaml

from mako import exceptions  # type: ignore
from mako.lookup import TemplateLookup  # type: ignore
from pkg_resources import resource_filename

from .ip_block import IpBlock
from .multi_register import MultiRegister
from .register import Register
from .window import Window


def bcname(esc_if_name: str) -> str:
    '''Get the name of the dv_base_reg_block subclass for this device interface'''
    return esc_if_name + "_reg_block"


def rcname(esc_if_name: str, r: Union[Register, MultiRegister]) -> str:
    '''Get the name of the dv_base_reg subclass for this register'''
    return '{}_reg_{}'.format(esc_if_name, r.name.lower())


def mcname(esc_if_name: str, m: Window) -> str:
    '''Get the name of the dv_base_mem subclass for this memory'''
    return '{}_mem_{}'.format(esc_if_name, m.name.lower())


def miname(m: Window) -> str:
    '''Get the lower-case name of a memory block'''
    return m.name.lower()


def gen_core_file(outdir: str,
                  lblock: str,
                  dv_base_prefix: str,
                  paths: List[str]) -> None:
    depends = ["lowrisc:dv:dv_base_reg"]
    if dv_base_prefix and dv_base_prefix != "dv_base":
        depends.append("lowrisc:dv:{}_reg".format(dv_base_prefix))

    # Generate a fusesoc core file that points at the files we've just
    # generated.
    core_data = {
        'name': "lowrisc:dv:{}_ral_pkg".format(lblock),
        'filesets': {
            'files_dv': {
                'depend': depends,
                'files': paths,
                'file_type': 'systemVerilogSource'
            },
        },
        'targets': {
            'default': {
                'filesets': [
                    'files_dv',
                ],
            },
        },
    }
    core_file_path = os.path.join(outdir, lblock + '_ral_pkg.core')
    with open(core_file_path, 'w') as core_file:
        core_file.write('CAPI=2:\n')
        yaml.dump(core_data, core_file, encoding='utf-8')


def gen_dv(block: IpBlock, dv_base_prefix: str, outdir: str) -> int:
    '''Generate DV files for an IpBlock'''

    lookup = TemplateLookup(directories=[resource_filename('reggen', '.')])
    uvm_reg_tpl = lookup.get_template('uvm_reg.sv.tpl')

    # Generate the RAL package(s). For a device interface with no name we
    # generate the package "<block>_ral_pkg" (writing to <block>_ral_pkg.sv).
    # In any other case, we also need the interface name, giving
    # <block>_<ifname>_ral_pkg.
    generated = []

    lblock = block.name.lower()
    for if_name, rb in block.reg_blocks.items():
        hier_path = '' if block.hier_path is None else block.hier_path + '.'
        if_suffix = '' if if_name is None else '_' + if_name.lower()
        mod_base = lblock + if_suffix
        reg_block_path = hier_path + 'u_reg' + if_suffix

        file_name = mod_base + '_ral_pkg.sv'
        generated.append(file_name)
        reg_top_path = os.path.join(outdir, file_name)
        with open(reg_top_path, 'w', encoding='UTF-8') as fout:
            try:
                fout.write(uvm_reg_tpl.render(rb=rb,
                                              block=block,
                                              esc_if_name=mod_base,
                                              reg_block_path=reg_block_path,
                                              dv_base_prefix=dv_base_prefix))
            except:  # noqa F722 for template Exception handling
                log.error(exceptions.text_error_template().render())
                return 1

    gen_core_file(outdir, lblock, dv_base_prefix, generated)
    return 0
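`gen_core_file` only needs an output directory, the lower-cased block name, the dv_base prefix and the list of generated files, so it can be exercised on its own. The sketch below uses hypothetical names (`uart`, a temporary directory); only the function signature comes from the hunk above, and the import path is an assumption.

```python
# Minimal sketch: write a fusesoc core file for a hypothetical "uart" RAL
# package. The block name and file list are made up for illustration.
import tempfile

from reggen.gen_dv import gen_core_file  # import path is an assumption

outdir = tempfile.mkdtemp()
gen_core_file(outdir, 'uart', 'dv_base', ['uart_ral_pkg.sv'])
# This produces <outdir>/uart_ral_pkg.core containing a 'CAPI=2:' header and
# a YAML body whose files_dv fileset depends on lowrisc:dv:dv_base_reg.
```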
@@ -0,0 +1,81 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Lint as: python3
#
"""Generate FPV CSR read and write assertions from IpBlock
"""

import logging as log
import os.path

import yaml
from mako import exceptions  # type: ignore
from mako.template import Template  # type: ignore
from pkg_resources import resource_filename

from .ip_block import IpBlock


def gen_fpv(block: IpBlock, outdir: str) -> int:
    # Read Register templates
    fpv_csr_tpl = Template(
        filename=resource_filename('reggen', 'fpv_csr.sv.tpl'))

    # Generate a module with CSR assertions for each device interface. For a
    # device interface with no name, we generate <block>_csr_assert_fpv. For a
    # named interface, we generate <block>_<ifname>_csr_assert_fpv.
    lblock = block.name.lower()
    generated = []
    for if_name, rb in block.reg_blocks.items():
        if not rb.flat_regs:
            # No registers to check!
            continue

        if if_name is None:
            mod_base = lblock
        else:
            mod_base = lblock + '_' + if_name.lower()

        mod_name = mod_base + '_csr_assert_fpv'
        filename = mod_name + '.sv'
        generated.append(filename)
        reg_top_path = os.path.join(outdir, filename)
        with open(reg_top_path, 'w', encoding='UTF-8') as fout:
            try:
                fout.write(fpv_csr_tpl.render(block=block,
                                              mod_base=mod_base,
                                              if_name=if_name,
                                              rb=rb))
            except:  # noqa F722 for template Exception handling
                log.error(exceptions.text_error_template().render())
                return 1

    # Generate a fusesoc core file that points at the files we've just
    # generated.
    core_data = {
        'name': "lowrisc:fpv:{}_csr_assert".format(lblock),
        'filesets': {
            'files_dv': {
                'depend': [
                    "lowrisc:tlul:headers",
                    "lowrisc:prim:assert",
                ],
                'files': generated,
                'file_type': 'systemVerilogSource'
            },
        },
        'targets': {
            'default': {
                'filesets': [
                    'files_dv',
                ],
            },
        },
    }
    core_file_path = os.path.join(outdir, lblock + '_csr_assert_fpv.core')
    with open(core_file_path, 'w') as core_file:
        core_file.write('CAPI=2:\n')
        yaml.dump(core_data, core_file, encoding='utf-8')

    return 0
@@ -0,0 +1,325 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""
Generate HTML documentation from IpBlock
"""

from typing import Set, TextIO

from .ip_block import IpBlock
from .html_helpers import expand_paras, render_td
from .multi_register import MultiRegister
from .reg_block import RegBlock
from .register import Register
from .window import Window


def genout(outfile: TextIO, msg: str) -> None:
    outfile.write(msg)


# Generation of HTML table with register bit-field summary picture
# Max 16-bit wide on one line


def gen_tbl_row(outfile: TextIO, msb: int, width: int, close: bool) -> None:
    if close:
        genout(outfile, "</tr>\n")
    genout(outfile, "<tr>")
    for x in range(msb, msb - width, -1):
        genout(outfile, "<td class=\"bitnum\">" + str(x) + "</td>")

    genout(outfile, "</tr><tr>")


def gen_html_reg_pic(outfile: TextIO, reg: Register, width: int) -> None:

    if width > 32:
        bsize = 3
        nextbit = 63
        hdrbits = 16
        nextline = 48
    elif width > 16:
        bsize = 3
        nextbit = 31
        hdrbits = 16
        nextline = 16
    elif width > 8:
        bsize = 3
        nextbit = 15
        nextline = 0
        hdrbits = 16
    else:
        bsize = 12
        nextbit = 7
        nextline = 0
        hdrbits = 8

    genout(outfile, "<table class=\"regpic\">")
    gen_tbl_row(outfile, nextbit, hdrbits, False)

    for field in reversed(reg.fields):
        fieldlsb = field.bits.lsb
        fieldwidth = field.bits.width()
        fieldmsb = field.bits.msb
        fname = field.name

        while nextbit > fieldmsb:
            if (nextbit >= nextline) and (fieldmsb < nextline):
                spans = nextbit - (nextline - 1)
            else:
                spans = nextbit - fieldmsb
            genout(
                outfile, "<td class=\"unused\" colspan=" + str(spans) +
                ">&nbsp;</td>\n")
            if (nextbit >= nextline) and (fieldmsb < nextline):
                nextbit = nextline - 1
                gen_tbl_row(outfile, nextbit, hdrbits, True)
                nextline = nextline - 16
            else:
                nextbit = fieldmsb

        while (fieldmsb >= nextline) and (fieldlsb < nextline):
            spans = fieldmsb - (nextline - 1)
            genout(
                outfile, "<td class=\"fname\" colspan=" + str(spans) + ">" +
                fname + "...</td>\n")
            fname = "..." + field.name
            fieldwidth = fieldwidth - spans
            fieldmsb = nextline - 1
            nextline = nextline - 16
            gen_tbl_row(outfile, fieldmsb, hdrbits, True)

        namelen = len(fname)
        if namelen == 0 or fname == ' ':
            fname = "&nbsp;"
        if namelen > bsize * fieldwidth:
            usestyle = (" style=\"font-size:" + str(
                (bsize * 100 * fieldwidth) / namelen) + "%\"")
        else:
            usestyle = ""

        genout(
            outfile, "<td class=\"fname\" colspan=" + str(fieldwidth) +
            usestyle + ">" + fname + "</td>\n")

        if (fieldlsb == nextline) and nextline > 0:
            gen_tbl_row(outfile, nextline - 1, hdrbits, True)
            nextline = nextline - 16

        nextbit = fieldlsb - 1
    while nextbit > 0:
        spans = nextbit - (nextline - 1)
        genout(outfile,
               "<td class=\"unused\" colspan=" + str(spans) + ">&nbsp;</td>\n")
        nextbit = nextline - 1
        if nextline > 0:
            gen_tbl_row(outfile, nextline - 1, hdrbits, True)
            nextline = nextline - 16

    genout(outfile, "</tr></table>")


# Generation of HTML table with header, register picture and details


def gen_html_register(outfile: TextIO,
                      reg: Register,
                      comp: str,
                      width: int,
                      rnames: Set[str]) -> None:
    rname = reg.name
    offset = reg.offset
    regwen_div = ''
    if reg.regwen is not None:
        regwen_div = ('    <div>Register enable = {}</div>\n'
                      .format(reg.regwen))

    desc_paras = expand_paras(reg.desc, rnames)
    desc_head = desc_paras[0]
    desc_body = desc_paras[1:]

    genout(outfile,
           '<table class="regdef" id="Reg_{lrname}">\n'
           ' <tr>\n'
           '  <th class="regdef" colspan=5>\n'
           '   <div>{comp}.{rname} @ {off:#x}</div>\n'
           '   <div>{desc}</div>\n'
           '   <div>Reset default = {resval:#x}, mask {mask:#x}</div>\n'
           '{wen}'
           '  </th>\n'
           ' </tr>\n'
           .format(lrname=rname.lower(),
                   comp=comp,
                   rname=rname,
                   off=offset,
                   desc=desc_head,
                   resval=reg.resval,
                   mask=reg.resmask,
                   wen=regwen_div))
    if desc_body:
        genout(outfile,
               '<tr><td colspan=5>{}</td></tr>'
               .format(''.join(desc_body)))

    genout(outfile, "<tr><td colspan=5>")
    gen_html_reg_pic(outfile, reg, width)
    genout(outfile, "</td></tr>\n")

    genout(outfile, "<tr><th width=5%>Bits</th>")
    genout(outfile, "<th width=5%>Type</th>")
    genout(outfile, "<th width=5%>Reset</th>")
    genout(outfile, "<th>Name</th>")
    genout(outfile, "<th>Description</th></tr>")
    nextbit = 0
    fcount = 0

    for field in reg.fields:
        fcount += 1
        fname = field.name

        fieldlsb = field.bits.lsb
        if fieldlsb > nextbit:
            genout(outfile, "<tr><td class=\"regbits\">")
            if nextbit == (fieldlsb - 1):
                genout(outfile, str(nextbit))
            else:
                genout(outfile, str(fieldlsb - 1) + ":" + str(nextbit))
            genout(outfile,
                   "</td><td></td><td></td><td></td><td>Reserved</td></tr>")
        genout(outfile,
               "<tr><td class=\"regbits\">" + field.bits.as_str() + "</td>")
        genout(outfile, "<td class=\"regperm\">" + field.swaccess.key + "</td>")
        genout(
            outfile, "<td class=\"regrv\">" +
            ('x' if field.resval is None else hex(field.resval)) +
            "</td>")
        genout(outfile, "<td class=\"regfn\">" + fname + "</td>")

        # Collect up any description and enum table
        desc_parts = []

        if field.desc is not None:
            desc_parts += expand_paras(field.desc, rnames)

        if field.enum is not None:
            desc_parts.append('<table>')
            for enum in field.enum:
                enum_desc_paras = expand_paras(enum.desc, rnames)
                desc_parts.append('<tr>'
                                  '<td>{val}</td>'
                                  '<td>{name}</td>'
                                  '<td>{desc}</td>'
                                  '</tr>\n'
                                  .format(val=enum.value,
                                          name=enum.name,
                                          desc=''.join(enum_desc_paras)))
            desc_parts.append('</table>')
            if field.has_incomplete_enum():
                desc_parts.append("<p>Other values are reserved.</p>")

        genout(outfile,
               '<td class="regde">{}</td>'.format(''.join(desc_parts)))
        nextbit = fieldlsb + field.bits.width()

    genout(outfile, "</table>\n<br>\n")


def gen_html_window(outfile: TextIO,
                    win: Window,
                    comp: str,
                    regwidth: int,
                    rnames: Set[str]) -> None:
    wname = win.name or '(unnamed window)'
    offset = win.offset
    genout(outfile,
           '<table class="regdef" id="Reg_{lwname}">\n'
           ' <tr>\n'
           '  <th class="regdef">\n'
           '   <div>{comp}.{wname} @ + {off:#x}</div>\n'
           '   <div>{items} item {swaccess} window</div>\n'
           '   <div>Byte writes are {byte_writes}supported</div>\n'
           '  </th>\n'
           ' </tr>\n'
           .format(comp=comp,
                   wname=wname,
                   lwname=wname.lower(),
                   off=offset,
                   items=win.items,
                   swaccess=win.swaccess.key,
                   byte_writes=('' if win.byte_write else '<i>not</i> ')))
    genout(outfile, '<tr><td><table class="regpic">')
    genout(outfile, '<tr><td width="10%"></td>')
    wid = win.validbits

    for x in range(regwidth - 1, -1, -1):
        if x == regwidth - 1 or x == wid - 1 or x == 0:
            genout(outfile, '<td class="bitnum">' + str(x) + '</td>')
        else:
            genout(outfile, '<td class="bitnum"></td>')
    genout(outfile, '</tr>')
    tblmax = win.items - 1
    for x in [0, 1, 2, tblmax - 1, tblmax]:
        if x == 2:
            genout(
                outfile, '<tr><td>&nbsp;</td><td align=center colspan=' +
                str(regwidth) + '>...</td></tr>')
        else:
            genout(
                outfile, '<tr><td class="regbits">+' +
                hex(offset + x * (regwidth // 8)) + '</td>')
            if wid < regwidth:
                genout(
                    outfile, '<td class="unused" colspan=' +
                    str(regwidth - wid) + '>&nbsp;</td>\n')
                genout(
                    outfile,
                    '<td class="fname" colspan=' + str(wid) + '>&nbsp;</td>\n')
            else:
                genout(
                    outfile, '<td class="fname" colspan=' + str(regwidth) +
                    '>&nbsp;</td>\n')
            genout(outfile, '</tr>')
    genout(outfile, '</td></tr></table>')
    genout(outfile,
           '<tr>{}</tr>'.format(render_td(win.desc, rnames, 'regde')))
    genout(outfile, "</table>\n<br>\n")


def gen_html_reg_block(outfile: TextIO,
                       rb: RegBlock,
                       comp: str,
                       width: int,
                       rnames: Set[str]) -> None:
    for x in rb.entries:
        if isinstance(x, Register):
            gen_html_register(outfile, x, comp, width, rnames)
        elif isinstance(x, MultiRegister):
            for reg in x.regs:
                gen_html_register(outfile, reg, comp, width, rnames)
        else:
            assert isinstance(x, Window)
            gen_html_window(outfile, x, comp, width, rnames)


def gen_html(block: IpBlock, outfile: TextIO) -> int:
    rnames = block.get_rnames()

    assert block.reg_blocks
    # Handle the case where there's just one interface
    if len(block.reg_blocks) == 1:
        rb = list(block.reg_blocks.values())[0]
        gen_html_reg_block(outfile, rb, block.name, block.regwidth, rnames)
        return 0

    # Handle the case where there is more than one device interface and,
    # correspondingly, more than one reg block.
    for iface_name, rb in block.reg_blocks.items():
        iface_desc = ('device interface <code>{}</code>'.format(iface_name)
                      if iface_name is not None
                      else 'the unnamed device interface')
        genout(outfile,
               '<h3>Registers visible under {}</h3>'.format(iface_desc))
        gen_html_reg_block(outfile, rb, block.name, block.regwidth, rnames)

    return 0
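`gen_tbl_row` is the building block for the bit-number header rows of the register picture; it can be run standalone against a `StringIO` to see the HTML it produces. The sketch below only assumes the module is importable as `reggen.gen_html` (the file name is not shown in this hunk).

```python
# Minimal sketch: render the bit-number header row for bits 15..0, the same
# call gen_html_reg_pic makes for a register of up to 16 valid bits.
import io

from reggen.gen_html import gen_tbl_row  # import path is an assumption

buf = io.StringIO()
gen_tbl_row(buf, 15, 16, False)
print(buf.getvalue())
# -> <tr><td class="bitnum">15</td> ... <td class="bitnum">0</td></tr><tr>
```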
@@ -0,0 +1,34 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Generate JSON/compact JSON/Hjson from register JSON tree
"""

import hjson


def gen_json(obj, outfile, format):
    if format == 'json':
        hjson.dumpJSON(obj,
                       outfile,
                       ensure_ascii=False,
                       use_decimal=True,
                       indent='  ',
                       for_json=True)
    elif format == 'compact':
        hjson.dumpJSON(obj,
                       outfile,
                       ensure_ascii=False,
                       for_json=True,
                       use_decimal=True,
                       separators=(',', ':'))
    elif format == 'hjson':
        hjson.dump(obj,
                   outfile,
                   ensure_ascii=False,
                   for_json=True,
                   use_decimal=True)
    else:
        raise ValueError('Invalid JSON format ' + format)

    return 0
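`gen_json` is a thin wrapper over `hjson.dump`/`hjson.dumpJSON` with three output flavours, so a round trip is easy to sketch. The Hjson fragment below is made up, and the import path is an assumption.

```python
# Minimal sketch: parse a small Hjson fragment and re-emit it in each of the
# three supported formats. The input is a made-up example.
import sys

import hjson

from reggen.gen_json import gen_json  # import path is an assumption

obj = hjson.loads('{ name: "GP", regwidth: "32", registers: [] }')
for fmt in ('json', 'compact', 'hjson'):
    gen_json(obj, sys.stdout, fmt)
    sys.stdout.write('\n')
```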
@@ -0,0 +1,136 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""Generate SystemVerilog designs from IpBlock object"""

import logging as log
import os
from typing import Dict, Optional, Tuple

from mako import exceptions  # type: ignore
from mako.template import Template  # type: ignore
from pkg_resources import resource_filename

from .ip_block import IpBlock
from .multi_register import MultiRegister
from .reg_base import RegBase
from .register import Register


def escape_name(name: str) -> str:
    return name.lower().replace(' ', '_')


def make_box_quote(msg: str, indent: str = '  ') -> str:
    hr = indent + ('/' * (len(msg) + 6))
    middle = indent + '// ' + msg + ' //'
    return '\n'.join([hr, middle, hr])


def _get_awparam_name(iface_name: Optional[str]) -> str:
    return (iface_name or 'Iface').capitalize() + 'Aw'


def get_addr_widths(block: IpBlock) -> Dict[Optional[str], Tuple[str, int]]:
    '''Return the address widths for the device interfaces

    Returns a dictionary keyed by interface name whose values are pairs:
    (paramname, width) where paramname is IfaceAw for an unnamed interface and
    FooAw for an interface called foo. This is constructed in the same order as
    block.reg_blocks.

    If there is a single device interface and that interface is unnamed, use
    the more general parameter name "BlockAw".

    '''
    assert block.reg_blocks
    if len(block.reg_blocks) == 1 and None in block.reg_blocks:
        return {None: ('BlockAw', block.reg_blocks[None].get_addr_width())}

    return {name: (_get_awparam_name(name), rb.get_addr_width())
            for name, rb in block.reg_blocks.items()}


def get_type_name_pfx(block: IpBlock, iface_name: Optional[str]) -> str:
    return block.name.lower() + ('' if iface_name is None
                                 else '_{}'.format(iface_name.lower()))


def get_r0(reg: RegBase) -> Register:
    '''Get a Register representing an entry in the RegBase'''
    if isinstance(reg, Register):
        return reg
    else:
        assert isinstance(reg, MultiRegister)
        return reg.reg


def get_iface_tx_type(block: IpBlock,
                      iface_name: Optional[str],
                      hw2reg: bool) -> str:
    x2x = 'hw2reg' if hw2reg else 'reg2hw'
    pfx = get_type_name_pfx(block, iface_name)
    return '_'.join([pfx, x2x, 't'])


def get_reg_tx_type(block: IpBlock, reg: RegBase, hw2reg: bool) -> str:
    '''Get the name of the hw2reg or reg2hw type for reg'''
    if isinstance(reg, Register):
        r0 = reg
        type_suff = 'reg_t'
    else:
        assert isinstance(reg, MultiRegister)
        r0 = reg.reg
        type_suff = 'mreg_t'

    x2x = 'hw2reg' if hw2reg else 'reg2hw'
    return '_'.join([block.name.lower(),
                     x2x,
                     r0.name.lower(),
                     type_suff])


def gen_rtl(block: IpBlock, outdir: str) -> int:
    # Read Register templates
    reg_top_tpl = Template(
        filename=resource_filename('reggen', 'reg_top.sv.tpl'))
    reg_pkg_tpl = Template(
        filename=resource_filename('reggen', 'reg_pkg.sv.tpl'))

    # Generate <block>_reg_pkg.sv
    #
    # This defines the various types used to interface between the *_reg_top
    # module(s) and the block itself.
    reg_pkg_path = os.path.join(outdir, block.name.lower() + "_reg_pkg.sv")
    with open(reg_pkg_path, 'w', encoding='UTF-8') as fout:
        try:
            fout.write(reg_pkg_tpl.render(block=block))
        except:  # noqa F722 for template Exception handling
            log.error(exceptions.text_error_template().render())
            return 1

    # Generate the register block implementation(s). For a device interface
    # with no name we generate the register module "<block>_reg_top" (writing
    # to <block>_reg_top.sv). In any other case, we also need the interface
    # name, giving <block>_<ifname>_reg_top.
    lblock = block.name.lower()
    for if_name, rb in block.reg_blocks.items():
        if if_name is None:
            mod_base = lblock
        else:
            mod_base = lblock + '_' + if_name.lower()

        mod_name = mod_base + '_reg_top'
        reg_top_path = os.path.join(outdir, mod_name + '.sv')
        with open(reg_top_path, 'w', encoding='UTF-8') as fout:
            try:
                fout.write(reg_top_tpl.render(block=block,
                                              mod_base=mod_base,
                                              mod_name=mod_name,
                                              if_name=if_name,
                                              rb=rb))
            except:  # noqa F722 for template Exception handling
                log.error(exceptions.text_error_template().render())
                return 1

    return 0
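`make_box_quote` produces the boxed comments placed in the generated SystemVerilog, and its output is easy to preview on its own. The import path below is an assumption; only the function itself comes from the hunk above.

```python
# Minimal sketch: preview the boxed comment emitted into generated RTL.
from reggen.gen_rtl import make_box_quote  # import path is an assumption

print(make_box_quote('Register interface'))
# Prints a three-line box: a rule of slashes, '// Register interface //'
# framed by the indent, and a matching closing rule of the same width.
```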
@@ -0,0 +1,306 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""
Generates the documentation for the register tool

"""
from .access import SWACCESS_PERMITTED, HWACCESS_PERMITTED
from reggen import (validate,
                    ip_block, enum_entry, field,
                    register, multi_register, window)


def genout(outfile, msg):
    outfile.write(msg)


doc_intro = """

<!-- Start of output generated by `regtool.py --doc` -->

The tables describe each key and the type of the value. The following
types are used:

Type | Description
---- | -----------
"""

swaccess_intro = """

Register fields are tagged using the swaccess key to describe the
permitted access and side-effects. This key must have one of these
values:

"""

hwaccess_intro = """

Register fields are tagged using the hwaccess key to describe the
permitted access from hardware logic and side-effects. This key must
have one of these values:

"""

top_example = """
The basic structure of a register definition file is thus:

```hjson
{
  name: "GP",
  regwidth: "32",
  registers: [
    // register definitions...
  ]
}

```

"""

register_example = """

The basic register definition group will follow this pattern:

```hjson
    { name: "REGA",
      desc: "Description of register",
      swaccess: "rw",
      resval: "42",
      fields: [
        // bit field definitions...
      ]
    }
```

The name and brief description are required. If the swaccess key is
provided it describes the access pattern that will be used by all
bitfields in the register that do not override with their own swaccess
key. This is a useful shortcut because in most cases a register will
have the same access restrictions for all fields. The reset value of
the register may also be provided here or in the individual fields. If
it is provided in both places then they must match; if it is provided
in neither place then the reset value defaults to zero for all except
write-only fields, where it defaults to x.

"""

field_example = """

Field names should be relatively short because they will be used
frequently (and need to fit in the register layout picture!) The field
description is expected to be longer and will most likely make use of
the Hjson ability to include multi-line strings. An example with three
fields:

```hjson
    fields: [
      { bits: "15:0",
        name: "RXS",
        desc: '''
        Last 16 oversampled values of RX. These are captured at 16x the baud
        rate clock. This is a shift register with the most recent bit in
        bit 0 and the oldest in bit 15. Only valid when ENRXS is set.
        '''
      }
      { bits: "16",
        name: "ENRXS",
        desc: '''
        If this bit is set the receive oversampled data is collected
        in the RXS field.
        '''
      }
      {bits: "20:19", name: "TXILVL",
       desc: "Trigger level for TX interrupts",
       resval: "2",
       enum: [
         { value: "0", name: "txlvl1", desc: "1 character" },
         { value: "1", name: "txlvl4", desc: "4 characters" },
         { value: "2", name: "txlvl8", desc: "8 characters" },
         { value: "3", name: "txlvl16", desc: "16 characters" }
       ]
      }
    ]
```

In all of these the swaccess parameter is inherited from the register
level, and will be added so this key is always available to the
backend. RXS and ENRXS will default to a zero reset value (unless
something different is provided for the register) and will have the
key added, but TXILVL explicitly sets its reset value as 2.

The missing bits 17 and 18 will be treated as reserved by the tool, as
will any bits between 21 and the maximum in the register.

TXILVL is an example using an enumeration to specify all valid
values for the field. In this case all possible values are described;
if the list is incomplete then the field is marked with the rsvdenum
key so the backend can take appropriate action. (If the enum field is
more than 7 bits then the checking is not done.)

"""

offset_intro = """

"""

multi_intro = """

The multireg expands on the register required fields and will generate
a list of the generated registers (that contain all required and
generated keys for an actual register).

"""

window_intro = """

A window defines an open region of the register space that can be used
for things that are not registers (for example access to a buffer ram).

"""

regwen_intro = """

Registers can protect themselves from software writes by using the
register attribute regwen. When not an empty string (the default
value), regwen indicates that another register must be true in order
to allow writes to this register. This is useful for the prevention
of software modification. The register-enable register (call it
REGWEN) must be one bit in width, and should default to 1 and be rw1c
for preferred security control. This allows all writes to proceed
until at some point software disables future modifications by clearing
REGWEN. An error is reported if REGWEN does not exist, contains more
than one bit, is not `rw1c` or does not default to 1. One REGWEN can
protect multiple registers. The REGWEN register must precede those
registers that refer to it in the .hjson register list. An example:

```hjson
    { name: "REGWEN",
      desc: "Register write enable for a bank of registers",
      swaccess: "rw1c",
      fields: [ { bits: "0", resval: "1" } ]
    }
    { name: "REGA",
      swaccess: "rw",
      regwen: "REGWEN",
      ...
    }
    { name: "REGB",
      swaccess: "rw",
      regwen: "REGWEN",
      ...
    }
```
"""

doc_tail = """

(end of output generated by `regtool.py --doc`)

"""


def doc_tbl_head(outfile, use):
    if use is not None:
        genout(outfile, "\nKey | Kind | Type | Description of Value\n")
        genout(outfile, "--- | ---- | ---- | --------------------\n")
    else:
        genout(outfile, "\nKey | Description\n")
        genout(outfile, "--- | -----------\n")


def doc_tbl_line(outfile, key, use, desc):
    if use is not None:
        desc_key, desc_txt = desc
        val_type = (validate.val_types[desc_key][0]
                    if desc_key is not None else None)
    else:
        assert isinstance(desc, str)
        val_type = None
        desc_txt = desc

    if val_type is not None:
        genout(
            outfile, '{} | {} | {} | {}\n'.format(key, validate.key_use[use],
                                                  val_type, desc_txt))
    else:
        genout(outfile, key + " | " + desc_txt + "\n")


def document(outfile):
    genout(outfile, doc_intro)
    for x in validate.val_types:
        genout(
            outfile,
            validate.val_types[x][0] + " | " + validate.val_types[x][1] + "\n")

    genout(outfile, swaccess_intro)
    doc_tbl_head(outfile, None)
    for key, value in SWACCESS_PERMITTED.items():
        doc_tbl_line(outfile, key, None, value[0])

    genout(outfile, hwaccess_intro)
    doc_tbl_head(outfile, None)
    for key, value in HWACCESS_PERMITTED.items():
        doc_tbl_line(outfile, key, None, value[0])

    genout(
        outfile, "\n\nThe top level of the JSON is a group containing "
        "the following keys:\n")
    doc_tbl_head(outfile, 1)
    for k, v in ip_block.REQUIRED_FIELDS.items():
        doc_tbl_line(outfile, k, 'r', v)
    for k, v in ip_block.OPTIONAL_FIELDS.items():
        doc_tbl_line(outfile, k, 'o', v)
    genout(outfile, top_example)

    genout(
        outfile, "\n\nThe list of registers includes register definition "
        "groups containing the following keys:\n")
    doc_tbl_head(outfile, 1)
    for k, v in register.REQUIRED_FIELDS.items():
        doc_tbl_line(outfile, k, 'r', v)
    for k, v in register.OPTIONAL_FIELDS.items():
        doc_tbl_line(outfile, k, 'o', v)
    genout(outfile, register_example)

    genout(
        outfile, "\n\nIn the fields list each field definition is a group "
        "itself containing the following keys:\n")
    doc_tbl_head(outfile, 1)
    for k, v in field.REQUIRED_FIELDS.items():
        doc_tbl_line(outfile, k, 'r', v)
    for k, v in field.OPTIONAL_FIELDS.items():
        doc_tbl_line(outfile, k, 'o', v)
    genout(outfile, field_example)

    genout(outfile, "\n\nDefinitions in an enumeration group contain:\n")
    doc_tbl_head(outfile, 1)
    for k, v in enum_entry.REQUIRED_FIELDS.items():
        doc_tbl_line(outfile, k, 'r', v)

    genout(
        outfile, "\n\nThe list of registers may include single entry groups "
        "to control the offset, open a window or generate registers:\n")
    doc_tbl_head(outfile, 1)
    for x in validate.list_optone:
        doc_tbl_line(outfile, x, 'o', validate.list_optone[x])

    genout(outfile, offset_intro)
    genout(outfile, regwen_intro)

    genout(outfile, window_intro)
    doc_tbl_head(outfile, 1)
    for k, v in window.REQUIRED_FIELDS.items():
        doc_tbl_line(outfile, k, 'r', v)
    for k, v in window.OPTIONAL_FIELDS.items():
        doc_tbl_line(outfile, k, 'o', v)

    genout(outfile, multi_intro)
    doc_tbl_head(outfile, 1)
    for k, v in multi_register.REQUIRED_FIELDS.items():
        doc_tbl_line(outfile, k, 'r', v)
    for k, v in multi_register.OPTIONAL_FIELDS.items():
        doc_tbl_line(outfile, k, 'o', v)

    genout(outfile, doc_tail)
@@ -0,0 +1,83 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

import logging as log
import re
from typing import List, Match, Optional, Set


def expand_paras(s: str, rnames: Set[str]) -> List[str]:
    '''Expand a description field to HTML.

    This supports a sort of simple pseudo-markdown. Supported Markdown
    features:

    - Separate paragraphs on a blank line
    - **bold** and *italicised* text
    - Back-ticks for pre-formatted text

    We also generate links to registers when a name is prefixed with a double
    exclamation mark. For example, if there is a register FOO then !!FOO or
    !!FOO.field will generate a link to that register.

    Returns a list of rendered paragraphs

    '''
    # Start by splitting into paragraphs. The regex matches a newline followed
    # by one or more lines that just contain whitespace. Then render each
    # paragraph with the _expand_paragraph worker function.
    paras = [_expand_paragraph(paragraph.strip(), rnames)
             for paragraph in re.split(r'\n(?:\s*\n)+', s)]

    # There will always be at least one paragraph (splitting an empty string
    # gives [''])
    assert paras
    return paras


def _expand_paragraph(s: str, rnames: Set[str]) -> str:
    '''Expand a single paragraph, as described in _get_desc_paras'''
    def fieldsub(match: Match[str]) -> str:
        base = match.group(1).partition('.')[0].lower()
        if base in rnames:
            if match.group(1)[-1] == ".":
                return ('<a href="#Reg_' + base + '"><code class=\"reg\">' +
                        match.group(1)[:-1] + '</code></a>.')
            else:
                return ('<a href="#Reg_' + base + '"><code class=\"reg\">' +
                        match.group(1) + '</code></a>')
        log.warn('!!' + match.group(1).partition('.')[0] +
                 ' not found in register list.')
        return match.group(0)

    # Split out pre-formatted text. Because the call to re.split has a capture
    # group in the regex, we get an odd number of results. Elements with even
    # indices are "normal text". Those with odd indices are the captured text
    # between the back-ticks.
    code_split = re.split(r'`([^`]+)`', s)
    expanded_parts = []

    for idx, part in enumerate(code_split):
        if idx & 1:
            # Text contained in back ticks
            expanded_parts.append('<code>{}</code>'.format(part))
            continue

        part = re.sub(r"!!([A-Za-z0-9_.]+)", fieldsub, part)
        part = re.sub(r"(?s)\*\*(.+?)\*\*", r'<B>\1</B>', part)
        part = re.sub(r"\*([^*]+?)\*", r'<I>\1</I>', part)
        expanded_parts.append(part)

    return '<p>{}</p>'.format(''.join(expanded_parts))


def render_td(s: str, rnames: Set[str], td_class: Optional[str]) -> str:
    '''Expand a description field and put it in a <td>.

    Returns a string. See _get_desc_paras for the format that gets expanded.

    '''
    desc_paras = expand_paras(s, rnames)
    class_attr = '' if td_class is None else ' class="{}"'.format(td_class)
    return '<td{}>{}</td>'.format(class_attr, ''.join(desc_paras))
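The pseudo-markdown expansion is easiest to see with a tiny example. In the sketch below, `CTRL` is a made-up register name supplied via `rnames` (the code above lowercases the `!!` reference before looking it up, so the set holds lower-case names), and the import path is an assumption.

```python
# Minimal sketch: expand a description that uses back-ticks, emphasis and a
# !!register cross-reference. "CTRL" is a made-up register name.
from reggen.html_helpers import expand_paras, render_td  # path is an assumption

rnames = {'ctrl'}
paras = expand_paras('Set `enable` in !!CTRL to *start* the block.', rnames)
print(paras[0])
# -> <p>Set <code>enable</code> in
#    <a href="#Reg_ctrl"><code class="reg">CTRL</code></a> to
#    <I>start</I> the block.</p>

# render_td wraps the expanded paragraphs in a <td class="..."> cell.
print(render_td('Two paragraphs.\n\nSecond one.', rnames, 'regde'))
```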
@@ -0,0 +1,81 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

from typing import Dict, Optional

from .lib import (check_keys, check_name,
                  check_str, check_optional_str, check_int)


class InterSignal:
    def __init__(self,
                 name: str,
                 desc: Optional[str],
                 struct: str,
                 package: Optional[str],
                 signal_type: str,
                 act: str,
                 width: int,
                 default: Optional[str]):
        assert 0 < width
        self.name = name
        self.desc = desc
        self.struct = struct
        self.package = package
        self.signal_type = signal_type
        self.act = act
        self.width = width
        self.default = default

    @staticmethod
    def from_raw(what: str, raw: object) -> 'InterSignal':
        rd = check_keys(raw, what,
                        ['name', 'struct', 'type', 'act'],
                        ['desc', 'package', 'width', 'default'])

        name = check_name(rd['name'], 'name field of ' + what)

        r_desc = rd.get('desc')
        if r_desc is None:
            desc = None
        else:
            desc = check_str(r_desc, 'desc field of ' + what)

        struct = check_str(rd['struct'], 'struct field of ' + what)

        r_package = rd.get('package')
        if r_package is None or r_package == '':
            package = None
        else:
            package = check_name(r_package, 'package field of ' + what)

        signal_type = check_name(rd['type'], 'type field of ' + what)
        act = check_name(rd['act'], 'act field of ' + what)
        width = check_int(rd.get('width', 1), 'width field of ' + what)
        if width <= 0:
            raise ValueError('width field of {} is not positive.'.format(what))

        default = check_optional_str(rd.get('default'),
                                     'default field of ' + what)

        return InterSignal(name, desc, struct, package,
                           signal_type, act, width, default)

    def _asdict(self) -> Dict[str, object]:
        ret = {'name': self.name}  # type: Dict[str, object]
        if self.desc is not None:
            ret['desc'] = self.desc
        ret['struct'] = self.struct
        if self.package is not None:
            ret['package'] = self.package
        ret['type'] = self.signal_type
        ret['act'] = self.act
        ret['width'] = self.width
        if self.default is not None:
            ret['default'] = self.default

        return ret

    def as_dict(self) -> Dict[str, object]:
        return self._asdict()
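`InterSignal.from_raw` accepts the dictionary form that appears in an IP block's `inter_signal_list`. Here is a hedged sketch with a made-up entry; the field values and the import path are assumptions, only the key set and signature come from the class above.

```python
# Minimal sketch: build an InterSignal from a made-up inter_signal_list entry.
from reggen.inter_signal import InterSignal  # import path is an assumption

raw = {
    'name': 'msg_fifo',
    'struct': 'fifo',
    'package': 'hmac_pkg',
    'type': 'req_rsp',
    'act': 'rsp',
    'width': 1,
}
sig = InterSignal.from_raw('inter_signal_list[0]', raw)
print(sig.as_dict())
```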
@ -0,0 +1,365 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
'''Code representing an IP block for reggen'''
|
||||||
|
|
||||||
|
from typing import Dict, List, Optional, Sequence, Set, Tuple
|
||||||
|
|
||||||
|
import hjson # type: ignore
|
||||||
|
|
||||||
|
from .alert import Alert
|
||||||
|
from .bus_interfaces import BusInterfaces
|
||||||
|
from .clocking import Clocking, ClockingItem
|
||||||
|
from .inter_signal import InterSignal
|
||||||
|
from .lib import (check_keys, check_name, check_int, check_bool,
|
||||||
|
check_list, check_optional_str)
|
||||||
|
from .params import ReggenParams, LocalParam
|
||||||
|
from .reg_block import RegBlock
|
||||||
|
from .signal import Signal
|
||||||
|
|
||||||
|
|
||||||
|
REQUIRED_FIELDS = {
|
||||||
|
'name': ['s', "name of the component"],
|
||||||
|
'clocking': ['l', "clocking for the device"],
|
||||||
|
'bus_interfaces': ['l', "bus interfaces for the device"],
|
||||||
|
'registers': [
|
||||||
|
'l',
|
||||||
|
"list of register definition groups and "
|
||||||
|
"offset control groups"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
OPTIONAL_FIELDS = {
|
||||||
|
'alert_list': ['lnw', "list of peripheral alerts"],
|
||||||
|
'available_inout_list': ['lnw', "list of available peripheral inouts"],
|
||||||
|
'available_input_list': ['lnw', "list of available peripheral inputs"],
|
||||||
|
'available_output_list': ['lnw', "list of available peripheral outputs"],
|
||||||
|
'expose_reg_if': ['pb', 'if set, expose reg interface in reg2hw signal'],
|
||||||
|
'hier_path': [
|
||||||
|
None,
|
||||||
|
'additional hierarchy path before the reg block instance'
|
||||||
|
],
|
||||||
|
'interrupt_list': ['lnw', "list of peripheral interrupts"],
|
||||||
|
'inter_signal_list': ['l', "list of inter-module signals"],
|
||||||
|
'no_auto_alert_regs': [
|
||||||
|
's', "Set to true to suppress automatic "
|
||||||
|
"generation of alert test registers. "
|
||||||
|
"Defaults to true if no alert_list is present. "
|
||||||
|
"Otherwise this defaults to false. "
|
||||||
|
],
|
||||||
|
'no_auto_intr_regs': [
|
||||||
|
's', "Set to true to suppress automatic "
|
||||||
|
"generation of interrupt registers. "
|
||||||
|
"Defaults to true if no interrupt_list is present. "
|
||||||
|
"Otherwise this defaults to false. "
|
||||||
|
],
|
||||||
|
'param_list': ['lp', "list of parameters of the IP"],
|
||||||
|
'regwidth': ['d', "width of registers in bits (default 32)"],
|
||||||
|
'reset_request_list': ['l', 'list of signals requesting reset'],
|
||||||
|
'scan': ['pb', 'Indicates the module have `scanmode_i`'],
|
||||||
|
'scan_reset': ['pb', 'Indicates the module have `scan_rst_ni`'],
|
||||||
|
'scan_en': ['pb', 'Indicates the module has `scan_en_i`'],
|
||||||
|
'SPDX-License-Identifier': [
|
||||||
|
's', "License ientifier (if using pure json) "
|
||||||
|
"Only use this if unable to put this "
|
||||||
|
"information in a comment at the top of the "
|
||||||
|
"file."
|
||||||
|
],
|
||||||
|
'wakeup_list': ['lnw', "list of peripheral wakeups"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class IpBlock:
|
||||||
|
def __init__(self,
|
||||||
|
name: str,
|
||||||
|
regwidth: int,
|
||||||
|
params: ReggenParams,
|
||||||
|
reg_blocks: Dict[Optional[str], RegBlock],
|
||||||
|
interrupts: Sequence[Signal],
|
||||||
|
no_auto_intr: bool,
|
||||||
|
alerts: List[Alert],
|
||||||
|
no_auto_alert: bool,
|
||||||
|
scan: bool,
|
||||||
|
inter_signals: List[InterSignal],
|
||||||
|
bus_interfaces: BusInterfaces,
|
||||||
|
hier_path: Optional[str],
|
||||||
|
clocking: Clocking,
|
||||||
|
xputs: Tuple[Sequence[Signal],
|
||||||
|
Sequence[Signal],
|
||||||
|
Sequence[Signal]],
|
||||||
|
wakeups: Sequence[Signal],
|
||||||
|
reset_requests: Sequence[Signal],
|
||||||
|
expose_reg_if: bool,
|
||||||
|
scan_reset: bool,
|
||||||
|
scan_en: bool):
|
||||||
|
assert reg_blocks
|
||||||
|
|
||||||
|
# Check that register blocks are in bijection with device interfaces
|
||||||
|
reg_block_names = reg_blocks.keys()
|
||||||
|
dev_if_names = [] # type: List[Optional[str]]
|
||||||
|
dev_if_names += bus_interfaces.named_devices
|
||||||
|
if bus_interfaces.has_unnamed_device:
|
||||||
|
dev_if_names.append(None)
|
||||||
|
assert set(reg_block_names) == set(dev_if_names)
|
||||||
|
|
||||||
|
self.name = name
|
||||||
|
self.regwidth = regwidth
|
||||||
|
self.reg_blocks = reg_blocks
|
||||||
|
self.params = params
|
||||||
|
self.interrupts = interrupts
|
||||||
|
self.no_auto_intr = no_auto_intr
|
||||||
|
self.alerts = alerts
|
||||||
|
self.no_auto_alert = no_auto_alert
|
||||||
|
self.scan = scan
|
||||||
|
self.inter_signals = inter_signals
|
||||||
|
self.bus_interfaces = bus_interfaces
|
||||||
|
self.hier_path = hier_path
|
||||||
|
self.clocking = clocking
|
||||||
|
self.xputs = xputs
|
||||||
|
self.wakeups = wakeups
|
||||||
|
self.reset_requests = reset_requests
|
||||||
|
self.expose_reg_if = expose_reg_if
|
||||||
|
self.scan_reset = scan_reset
|
||||||
|
self.scan_en = scan_en
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_raw(param_defaults: List[Tuple[str, str]],
|
||||||
|
raw: object,
|
||||||
|
where: str) -> 'IpBlock':
|
||||||
|
|
||||||
|
rd = check_keys(raw, 'block at ' + where,
|
||||||
|
list(REQUIRED_FIELDS.keys()),
|
||||||
|
list(OPTIONAL_FIELDS.keys()))
|
||||||
|
|
||||||
|
name = check_name(rd['name'], 'name of block at ' + where)
|
||||||
|
|
||||||
|
what = '{} block at {}'.format(name, where)
|
||||||
|
|
||||||
|
r_regwidth = rd.get('regwidth')
|
||||||
|
if r_regwidth is None:
|
||||||
|
regwidth = 32
|
||||||
|
else:
|
||||||
|
regwidth = check_int(r_regwidth, 'regwidth field of ' + what)
|
||||||
|
if regwidth <= 0:
|
||||||
|
raise ValueError('Invalid regwidth field for {}: '
|
||||||
|
'{} is not positive.'
|
||||||
|
.format(what, regwidth))
|
||||||
|
|
||||||
|
params = ReggenParams.from_raw('parameter list for ' + what,
|
||||||
|
rd.get('param_list', []))
|
||||||
|
try:
|
||||||
|
params.apply_defaults(param_defaults)
|
||||||
|
except (ValueError, KeyError) as err:
|
||||||
|
raise ValueError('Failed to apply defaults to params: {}'
|
||||||
|
.format(err)) from None
|
||||||
|
|
||||||
|
init_block = RegBlock(regwidth, params)
|
||||||
|
|
||||||
|
interrupts = Signal.from_raw_list('interrupt_list for block {}'
|
||||||
|
.format(name),
|
||||||
|
rd.get('interrupt_list', []))
|
||||||
|
alerts = Alert.from_raw_list('alert_list for block {}'
|
||||||
|
.format(name),
|
||||||
|
rd.get('alert_list', []))
|
||||||
|
|
||||||
|
no_auto_intr = check_bool(rd.get('no_auto_intr_regs', not interrupts),
|
||||||
|
'no_auto_intr_regs field of ' + what)
|
||||||
|
|
||||||
|
no_auto_alert = check_bool(rd.get('no_auto_alert_regs', not alerts),
|
||||||
|
'no_auto_alert_regs field of ' + what)
|
||||||
|
|
||||||
|
if interrupts and not no_auto_intr:
|
||||||
|
if interrupts[-1].bits.msb >= regwidth:
|
||||||
|
raise ValueError("Interrupt list for {} is too wide: "
|
||||||
|
"msb is {}, which doesn't fit with a "
|
||||||
|
"regwidth of {}."
|
||||||
|
.format(what,
|
||||||
|
interrupts[-1].bits.msb, regwidth))
|
||||||
|
init_block.make_intr_regs(interrupts)
|
||||||
|
|
||||||
|
if alerts:
|
||||||
|
if not no_auto_alert:
|
||||||
|
if len(alerts) > regwidth:
|
||||||
|
raise ValueError("Interrupt list for {} is too wide: "
|
||||||
|
"{} alerts don't fit with a regwidth of {}."
|
||||||
|
.format(what, len(alerts), regwidth))
|
||||||
|
init_block.make_alert_regs(alerts)
|
||||||
|
|
||||||
|
# Generate a NumAlerts parameter
|
||||||
|
existing_param = params.get('NumAlerts')
|
||||||
|
if existing_param is not None:
|
||||||
|
if ((not isinstance(existing_param, LocalParam) or
|
||||||
|
existing_param.param_type != 'int' or
|
||||||
|
existing_param.value != str(len(alerts)))):
|
||||||
|
raise ValueError('Conflicting definition of NumAlerts '
|
||||||
|
'parameter.')
|
||||||
|
else:
|
||||||
|
params.add(LocalParam(name='NumAlerts',
|
||||||
|
desc='Number of alerts',
|
||||||
|
param_type='int',
|
||||||
|
value=str(len(alerts))))
|
||||||
|
|
||||||
|
scan = check_bool(rd.get('scan', False), 'scan field of ' + what)
|
||||||
|
|
||||||
|
reg_blocks = RegBlock.build_blocks(init_block, rd['registers'])
|
||||||
|
|
||||||
|
r_inter_signals = check_list(rd.get('inter_signal_list', []),
|
||||||
|
'inter_signal_list field')
|
||||||
|
inter_signals = [
|
||||||
|
InterSignal.from_raw('entry {} of the inter_signal_list field'
|
||||||
|
.format(idx + 1),
|
||||||
|
entry)
|
||||||
|
for idx, entry in enumerate(r_inter_signals)
|
||||||
|
]
|
||||||
|
|
||||||
|
bus_interfaces = (BusInterfaces.
|
||||||
|
from_raw(rd['bus_interfaces'],
|
||||||
|
'bus_interfaces field of ' + where))
|
||||||
|
inter_signals += bus_interfaces.inter_signals()
|
||||||
|
|
||||||
|
hier_path = check_optional_str(rd.get('hier_path', None),
|
||||||
|
'hier_path field of ' + what)
|
||||||
|
|
||||||
|
clocking = Clocking.from_raw(rd['clocking'],
|
||||||
|
'clocking field of ' + what)
|
||||||
|
|
||||||
|
xputs = (
|
||||||
|
Signal.from_raw_list('available_inout_list for block ' + name,
|
||||||
|
rd.get('available_inout_list', [])),
|
||||||
|
Signal.from_raw_list('available_input_list for block ' + name,
|
||||||
|
rd.get('available_input_list', [])),
|
||||||
|
Signal.from_raw_list('available_output_list for block ' + name,
|
||||||
|
rd.get('available_output_list', []))
|
||||||
|
)
|
||||||
|
wakeups = Signal.from_raw_list('wakeup_list for block ' + name,
|
||||||
|
rd.get('wakeup_list', []))
|
||||||
|
rst_reqs = Signal.from_raw_list('reset_request_list for block ' + name,
|
||||||
|
rd.get('reset_request_list', []))
|
||||||
|
|
||||||
|
expose_reg_if = check_bool(rd.get('expose_reg_if', False),
|
||||||
|
'expose_reg_if field of ' + what)
|
||||||
|
|
||||||
|
scan_reset = check_bool(rd.get('scan_reset', False),
|
||||||
|
'scan_reset field of ' + what)
|
||||||
|
|
||||||
|
scan_en = check_bool(rd.get('scan_en', False),
|
||||||
|
'scan_en field of ' + what)
|
||||||
|
|
||||||
|
# Check that register blocks are in bijection with device interfaces
|
||||||
|
reg_block_names = reg_blocks.keys()
|
||||||
|
dev_if_names = [] # type: List[Optional[str]]
|
||||||
|
dev_if_names += bus_interfaces.named_devices
|
||||||
|
if bus_interfaces.has_unnamed_device:
|
||||||
|
dev_if_names.append(None)
|
||||||
|
if set(reg_block_names) != set(dev_if_names):
|
||||||
|
raise ValueError("IP block {} defines device interfaces, named {} "
|
||||||
|
"but its registers don't match (they are keyed "
|
||||||
|
"by {})."
|
||||||
|
.format(name, dev_if_names,
|
||||||
|
list(reg_block_names)))
|
||||||
|
|
||||||
|
return IpBlock(name, regwidth, params, reg_blocks,
|
||||||
|
interrupts, no_auto_intr, alerts, no_auto_alert,
|
||||||
|
scan, inter_signals, bus_interfaces,
|
||||||
|
hier_path, clocking, xputs,
|
||||||
|
wakeups, rst_reqs, expose_reg_if, scan_reset, scan_en)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_text(txt: str,
|
||||||
|
param_defaults: List[Tuple[str, str]],
|
||||||
|
where: str) -> 'IpBlock':
|
||||||
|
'''Load an IpBlock from an hjson description in txt'''
|
||||||
|
return IpBlock.from_raw(param_defaults,
|
||||||
|
hjson.loads(txt, use_decimal=True),
|
||||||
|
where)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_path(path: str,
|
||||||
|
param_defaults: List[Tuple[str, str]]) -> 'IpBlock':
|
||||||
|
'''Load an IpBlock from an hjson description in a file at path'''
|
||||||
|
with open(path, 'r', encoding='utf-8') as handle:
|
||||||
|
return IpBlock.from_text(handle.read(), param_defaults,
|
||||||
|
'file at {!r}'.format(path))
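
    # Illustrative usage sketch, not part of this commit. Assuming the repo
    # layout from the README (the path below is hypothetical), a block can be
    # loaded and inspected like this:
    #
    #     block = IpBlock.from_path('../hw/ip/uart/data/uart.hjson', [])
    #     print(block.name, block.regwidth, sorted(block.get_rnames()))
    #
    # The second argument is a list of (name, value) pairs applied on top of
    # the parameter defaults before the block is returned.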
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
ret = {
|
||||||
|
'name': self.name,
|
||||||
|
'regwidth': self.regwidth
|
||||||
|
}
|
||||||
|
if len(self.reg_blocks) == 1 and None in self.reg_blocks:
|
||||||
|
ret['registers'] = self.reg_blocks[None].as_dicts()
|
||||||
|
else:
|
||||||
|
ret['registers'] = {k: v.as_dicts()
|
||||||
|
for k, v in self.reg_blocks.items()}
|
||||||
|
|
||||||
|
ret['param_list'] = self.params.as_dicts()
|
||||||
|
ret['interrupt_list'] = self.interrupts
|
||||||
|
ret['no_auto_intr_regs'] = self.no_auto_intr
|
||||||
|
ret['alert_list'] = self.alerts
|
||||||
|
ret['no_auto_alert_regs'] = self.no_auto_alert
|
||||||
|
ret['scan'] = self.scan
|
||||||
|
ret['inter_signal_list'] = self.inter_signals
|
||||||
|
ret['bus_interfaces'] = self.bus_interfaces.as_dicts()
|
||||||
|
|
||||||
|
if self.hier_path is not None:
|
||||||
|
ret['hier_path'] = self.hier_path
|
||||||
|
|
||||||
|
ret['clocking'] = self.clocking.items
|
||||||
|
|
||||||
|
inouts, inputs, outputs = self.xputs
|
||||||
|
if inouts:
|
||||||
|
ret['available_inout_list'] = inouts
|
||||||
|
if inputs:
|
||||||
|
ret['available_input_list'] = inputs
|
||||||
|
if outputs:
|
||||||
|
ret['available_output_list'] = outputs
|
||||||
|
|
||||||
|
if self.wakeups:
|
||||||
|
ret['wakeup_list'] = self.wakeups
|
||||||
|
if self.reset_requests:
|
||||||
|
ret['reset_request_list'] = self.reset_requests
|
||||||
|
|
||||||
|
ret['scan_reset'] = self.scan_reset
|
||||||
|
ret['scan_en'] = self.scan_en
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def get_rnames(self) -> Set[str]:
|
||||||
|
ret = set() # type: Set[str]
|
||||||
|
for rb in self.reg_blocks.values():
|
||||||
|
ret = ret.union(set(rb.name_to_offset.keys()))
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def get_signals_as_list_of_dicts(self) -> List[Dict[str, object]]:
|
||||||
|
        '''Return the inout, input and output signals as a list of dicts'''
|
||||||
|
result = []
|
||||||
|
for iodir, xput in zip(('inout', 'input', 'output'), self.xputs):
|
||||||
|
for sig in xput:
|
||||||
|
result.append(sig.as_nwt_dict(iodir))
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get_signal_by_name_as_dict(self, name: str) -> Dict[str, object]:
|
||||||
|
'''Look up and return signal by name'''
|
||||||
|
sig_list = self.get_signals_as_list_of_dicts()
|
||||||
|
for sig in sig_list:
|
||||||
|
if sig['name'] == name:
|
||||||
|
return sig
|
||||||
|
else:
|
||||||
|
raise ValueError("Signal {} does not exist in IP block {}"
|
||||||
|
.format(name, self.name))
|
||||||
|
|
||||||
|
def has_shadowed_reg(self) -> bool:
|
||||||
|
        '''Return True if any of the register blocks contains a shadowed register'''
|
||||||
|
|
||||||
|
for rb in self.reg_blocks.values():
|
||||||
|
if rb.has_shadowed_reg():
|
||||||
|
return True
|
||||||
|
|
||||||
|
        # If we get here, then no register block has a shadowed register.
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_primary_clock(self) -> ClockingItem:
|
||||||
|
        '''Return the primary clock of the block'''
|
||||||
|
|
||||||
|
return self.clocking.primary
|
|
@@ -0,0 +1,254 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
'''Parsing support code for reggen'''
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Dict, List, Optional, cast
|
||||||
|
|
||||||
|
|
||||||
|
# Names that are prohibited (used as reserved keywords in systemverilog)
|
||||||
|
_VERILOG_KEYWORDS = {
|
||||||
|
'alias', 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
|
||||||
|
'assert', 'assign', 'assume', 'automatic', 'before', 'begin', 'bind',
|
||||||
|
'bins', 'binsof', 'bit', 'break', 'buf', 'bufif0', 'bufif1', 'byte',
|
||||||
|
'case', 'casex', 'casez', 'cell', 'chandle', 'class', 'clocking', 'cmos',
|
||||||
|
'config', 'const', 'constraint', 'context', 'continue', 'cover',
|
||||||
|
'covergroup', 'coverpoint', 'cross', 'deassign', 'default', 'defparam',
|
||||||
|
'design', 'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase',
|
||||||
|
'endclass', 'endclocking', 'endconfig', 'endfunction', 'endgenerate',
|
||||||
|
'endgroup', 'endinterface', 'endmodule', 'endpackage', 'endprimitive',
|
||||||
|
'endprogram', 'endproperty', 'endspecify', 'endsequence', 'endtable',
|
||||||
|
'endtask', 'enum', 'event', 'expect', 'export', 'extends', 'extern',
|
||||||
|
'final', 'first_match', 'for', 'force', 'foreach', 'forever', 'fork',
|
||||||
|
'forkjoin', 'function', 'generate', 'genvar', 'highz0', 'highz1', 'if',
|
||||||
|
'iff', 'ifnone', 'ignore_bins', 'illegal_bins', 'import', 'incdir',
|
||||||
|
'include', 'initial', 'inout', 'input', 'inside', 'instance', 'int',
|
||||||
|
'integer', 'interface', 'intersect', 'join', 'join_any', 'join_none',
|
||||||
|
'large', 'liblist', 'library', 'local', 'localparam', 'logic', 'longint',
|
||||||
|
'macromodule', 'matches', 'medium', 'modport', 'module', 'nand', 'negedge',
|
||||||
|
'new', 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
|
||||||
|
'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
|
||||||
|
'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
|
||||||
|
'pull1', 'pulldown', 'pullup', 'pulsestyle_onevent', 'pulsestyle_ondetect',
|
||||||
|
'pure', 'rand', 'randc', 'randcase', 'randsequence', 'rcmos', 'real',
|
||||||
|
'realtime', 'ref', 'reg', 'release', 'repeat', 'return', 'rnmos', 'rpmos',
|
||||||
|
'rtran', 'rtranif0', 'rtranif1', 'scalared', 'sequence', 'shortint',
|
||||||
|
'shortreal', 'showcancelled', 'signed', 'small', 'solve', 'specify',
|
||||||
|
'specparam', 'static', 'string', 'strong0', 'strong1', 'struct', 'super',
|
||||||
|
'supply0', 'supply1', 'table', 'tagged', 'task', 'this', 'throughout',
|
||||||
|
'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1', 'tri',
|
||||||
|
'tri0', 'tri1', 'triand', 'trior', 'trireg', 'type', 'typedef', 'union',
|
||||||
|
'unique', 'unsigned', 'use', 'uwire', 'var', 'vectored', 'virtual', 'void',
|
||||||
|
'wait', 'wait_order', 'wand', 'weak0', 'weak1', 'while', 'wildcard',
|
||||||
|
'wire', 'with', 'within', 'wor', 'xnor', 'xor'
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def check_str_dict(obj: object, what: str) -> Dict[str, object]:
|
||||||
|
if not isinstance(obj, dict):
|
||||||
|
raise ValueError("{} is expected to be a dict, but was actually a {}."
|
||||||
|
.format(what, type(obj).__name__))
|
||||||
|
|
||||||
|
for key in obj:
|
||||||
|
if not isinstance(key, str):
|
||||||
|
raise ValueError('{} has a key {!r}, which is not a string.'
|
||||||
|
.format(what, key))
|
||||||
|
|
||||||
|
return cast(Dict[str, object], obj)
|
||||||
|
|
||||||
|
|
||||||
|
def check_keys(obj: object,
|
||||||
|
what: str,
|
||||||
|
required_keys: List[str],
|
||||||
|
optional_keys: List[str]) -> Dict[str, object]:
|
||||||
|
'''Check that obj is a dict object with the expected keys
|
||||||
|
|
||||||
|
If not, raise a ValueError; the what argument names the object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
od = check_str_dict(obj, what)
|
||||||
|
|
||||||
|
allowed = set()
|
||||||
|
missing = []
|
||||||
|
for key in required_keys:
|
||||||
|
assert key not in allowed
|
||||||
|
allowed.add(key)
|
||||||
|
if key not in od:
|
||||||
|
missing.append(key)
|
||||||
|
|
||||||
|
for key in optional_keys:
|
||||||
|
assert key not in allowed
|
||||||
|
allowed.add(key)
|
||||||
|
|
||||||
|
unexpected = []
|
||||||
|
for key in od:
|
||||||
|
if key not in allowed:
|
||||||
|
unexpected.append(key)
|
||||||
|
|
||||||
|
if missing or unexpected:
|
||||||
|
mstr = ('The following required fields were missing: {}.'
|
||||||
|
.format(', '.join(missing)) if missing else '')
|
||||||
|
ustr = ('The following unexpected fields were found: {}.'
|
||||||
|
.format(', '.join(unexpected)) if unexpected else '')
|
||||||
|
raise ValueError("{} doesn't have the right keys. {}{}{}"
|
||||||
|
.format(what,
|
||||||
|
mstr,
|
||||||
|
' ' if mstr and ustr else '',
|
||||||
|
ustr))
|
||||||
|
|
||||||
|
return od
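

# Illustrative sketch, not part of this commit: a call like
#
#     check_keys({'name': 'uart', 'regwidth': 32},
#                'example block', ['name'], ['regwidth'])
#
# returns the dict unchanged, while a missing required key or an unexpected
# key raises a ValueError that lists the offending field names.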
|
||||||
|
|
||||||
|
|
||||||
|
def check_str(obj: object, what: str) -> str:
|
||||||
|
'''Check that the given object is a string
|
||||||
|
|
||||||
|
If not, raise a ValueError; the what argument names the object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if not isinstance(obj, str):
|
||||||
|
raise ValueError('{} is of type {}, not a string.'
|
||||||
|
.format(what, type(obj).__name__))
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
def check_name(obj: object, what: str) -> str:
|
||||||
|
'''Check that obj is a string that's a valid name.
|
||||||
|
|
||||||
|
If not, raise a ValueError; the what argument names the object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
as_str = check_str(obj, what)
|
||||||
|
|
||||||
|
# Allow the usual symbol constituents (alphanumeric plus underscore; no
|
||||||
|
# leading numbers)
|
||||||
|
if not re.match(r'[a-zA-Z_][a-zA-Z_0-9]*$', as_str):
|
||||||
|
raise ValueError("{} is {!r}, which isn't a valid symbol in "
|
||||||
|
"C / Verilog, so isn't allowed as a name."
|
||||||
|
.format(what, as_str))
|
||||||
|
|
||||||
|
# Also check that this isn't a reserved word.
|
||||||
|
if as_str in _VERILOG_KEYWORDS:
|
||||||
|
raise ValueError("{} is {!r}, which is a reserved word in "
|
||||||
|
"SystemVerilog, so isn't allowed as a name."
|
||||||
|
.format(what, as_str))
|
||||||
|
|
||||||
|
return as_str
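

# Illustrative sketch, not part of this commit: check_name('clk_i', 'example')
# returns 'clk_i', while check_name('module', 'example') raises a ValueError
# because 'module' is a reserved SystemVerilog keyword, and so would a name
# starting with a digit.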
|
||||||
|
|
||||||
|
|
||||||
|
def check_bool(obj: object, what: str) -> bool:
|
||||||
|
'''Check that obj is a bool or a string that parses to a bool.
|
||||||
|
|
||||||
|
If not, raise a ValueError; the what argument names the object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if isinstance(obj, str):
|
||||||
|
as_bool = {
|
||||||
|
'true': True,
|
||||||
|
'false': False,
|
||||||
|
'1': True,
|
||||||
|
'0': False
|
||||||
|
}.get(obj.lower())
|
||||||
|
if as_bool is None:
|
||||||
|
raise ValueError('{} is {!r}, which cannot be parsed as a bool.'
|
||||||
|
.format(what, obj))
|
||||||
|
return as_bool
|
||||||
|
|
||||||
|
if obj is True or obj is False:
|
||||||
|
return obj
|
||||||
|
|
||||||
|
raise ValueError('{} is of type {}, not a bool.'
|
||||||
|
.format(what, type(obj).__name__))
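

# Illustrative sketch, not part of this commit: native bools pass through, and
# the strings 'true'/'false'/'1'/'0' (case-insensitive) are parsed, so both
# check_bool(True, 'example flag') and check_bool('True', 'example flag')
# return True, while check_bool('yes', 'example flag') raises a ValueError.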
|
||||||
|
|
||||||
|
|
||||||
|
def check_list(obj: object, what: str) -> List[object]:
|
||||||
|
'''Check that the given object is a list
|
||||||
|
|
||||||
|
If not, raise a ValueError; the what argument names the object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if not isinstance(obj, list):
|
||||||
|
raise ValueError('{} is of type {}, not a list.'
|
||||||
|
.format(what, type(obj).__name__))
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
def check_str_list(obj: object, what: str) -> List[str]:
|
||||||
|
'''Check that the given object is a list of strings
|
||||||
|
|
||||||
|
If not, raise a ValueError; the what argument names the object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
lst = check_list(obj, what)
|
||||||
|
for idx, elt in enumerate(lst):
|
||||||
|
if not isinstance(elt, str):
|
||||||
|
raise ValueError('Element {} of {} is of type {}, '
|
||||||
|
'not a string.'
|
||||||
|
.format(idx, what, type(elt).__name__))
|
||||||
|
return cast(List[str], lst)
|
||||||
|
|
||||||
|
|
||||||
|
def check_int(obj: object, what: str) -> int:
|
||||||
|
'''Check that obj is an integer or a string that parses to an integer.
|
||||||
|
|
||||||
|
If not, raise a ValueError; the what argument names the object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if isinstance(obj, int):
|
||||||
|
return obj
|
||||||
|
|
||||||
|
if isinstance(obj, str):
|
||||||
|
try:
|
||||||
|
return int(obj, 0)
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError('{} is {!r}, which cannot be parsed as an int.'
|
||||||
|
.format(what, obj)) from None
|
||||||
|
|
||||||
|
raise ValueError('{} is of type {}, not an integer.'
|
||||||
|
.format(what, type(obj).__name__))
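

# Illustrative sketch, not part of this commit: integers may be given natively
# or as strings, and string values are parsed with base auto-detection, so
# check_int(32, 'x'), check_int('32', 'x') and check_int('0x20', 'x') all
# return 32.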
|
||||||
|
|
||||||
|
|
||||||
|
def check_xint(obj: object, what: str) -> Optional[int]:
|
||||||
|
'''Check that obj is an integer, a string that parses to an integer or "x".
|
||||||
|
|
||||||
|
On success, return an integer value if there is one or None if the value
|
||||||
|
was 'x'. On failure, raise a ValueError; the what argument names the
|
||||||
|
object.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if isinstance(obj, int):
|
||||||
|
return obj
|
||||||
|
|
||||||
|
if isinstance(obj, str):
|
||||||
|
if obj == 'x':
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
return int(obj, 0)
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError('{} is {!r}, which is not "x", '
|
||||||
|
'nor can it be parsed as an int.'
|
||||||
|
.format(what, obj)) from None
|
||||||
|
|
||||||
|
raise ValueError('{} is of type {}, not an integer.'
|
||||||
|
.format(what, type(obj).__name__))
|
||||||
|
|
||||||
|
|
||||||
|
def check_optional_str(obj: object, what: str) -> Optional[str]:
|
||||||
|
'''Check that obj is a string or None'''
|
||||||
|
return None if obj is None else check_str(obj, what)
|
||||||
|
|
||||||
|
|
||||||
|
def check_optional_name(obj: object, what: str) -> Optional[str]:
|
||||||
|
'''Check that obj is a valid name or None'''
|
||||||
|
return None if obj is None else check_name(obj, what)
|
||||||
|
|
||||||
|
|
||||||
|
def get_basename(name: str) -> str:
|
||||||
|
'''Strip trailing _number (used as multireg suffix) from name'''
|
||||||
|
# TODO: This is a workaround, should solve this as part of parsing a
|
||||||
|
# multi-reg.
|
||||||
|
match = re.search(r'_[0-9]+$', name)
|
||||||
|
assert match
|
||||||
|
assert match.start() > 0
|
||||||
|
return name[0:match.start()]
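

# Illustrative sketch, not part of this commit: the trailing index added by
# multireg expansion is stripped, so get_basename('ctrl_12') returns 'ctrl'
# and get_basename('data_0') returns 'data'. Names without a trailing
# '_<number>' trip the assertion above.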
|
|
@@ -0,0 +1,150 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
from reggen import register
|
||||||
|
from .field import Field
|
||||||
|
from .lib import check_keys, check_str, check_name, check_bool
|
||||||
|
from .params import ReggenParams
|
||||||
|
from .reg_base import RegBase
|
||||||
|
from .register import Register
|
||||||
|
|
||||||
|
REQUIRED_FIELDS = {
|
||||||
|
'name': ['s', "base name of the registers"],
|
||||||
|
'desc': ['t', "description of the registers"],
|
||||||
|
'count': [
|
||||||
|
's', "number of instances to generate."
|
||||||
|
" This field can be integer or string matching"
|
||||||
|
" from param_list."
|
||||||
|
],
|
||||||
|
'cname': [
|
||||||
|
's', "base name for each instance, mostly"
|
||||||
|
" useful for referring to instance in messages."
|
||||||
|
],
|
||||||
|
'fields': [
|
||||||
|
'l', "list of register field description"
|
||||||
|
" groups. Describes bit positions used for"
|
||||||
|
" base instance."
|
||||||
|
]
|
||||||
|
}
|
||||||
|
OPTIONAL_FIELDS = register.OPTIONAL_FIELDS.copy()
|
||||||
|
OPTIONAL_FIELDS.update({
|
||||||
|
'regwen_multi': [
|
||||||
|
'pb', "If true, regwen term increments"
|
||||||
|
" along with current multireg count."
|
||||||
|
],
|
||||||
|
'compact': [
|
||||||
|
'pb', "If true, allow multireg compacting."
|
||||||
|
"If false, do not compact."
|
||||||
|
]
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
class MultiRegister(RegBase):
|
||||||
|
def __init__(self,
|
||||||
|
offset: int,
|
||||||
|
addrsep: int,
|
||||||
|
reg_width: int,
|
||||||
|
params: ReggenParams,
|
||||||
|
raw: object):
|
||||||
|
super().__init__(offset)
|
||||||
|
|
||||||
|
rd = check_keys(raw, 'multireg',
|
||||||
|
list(REQUIRED_FIELDS.keys()),
|
||||||
|
list(OPTIONAL_FIELDS.keys()))
|
||||||
|
|
||||||
|
# Now that we've checked the schema of rd, we make a "reg" version of
|
||||||
|
# it that removes any fields that are allowed by MultiRegister but
|
||||||
|
# aren't allowed by Register. We'll pass that to the register factory
|
||||||
|
# method.
|
||||||
|
reg_allowed_keys = (set(register.REQUIRED_FIELDS.keys()) |
|
||||||
|
set(register.OPTIONAL_FIELDS.keys()))
|
||||||
|
reg_rd = {key: value
|
||||||
|
for key, value in rd.items()
|
||||||
|
if key in reg_allowed_keys}
|
||||||
|
self.reg = Register.from_raw(reg_width, offset, params, reg_rd)
|
||||||
|
|
||||||
|
self.cname = check_name(rd['cname'],
|
||||||
|
'cname field of multireg {}'
|
||||||
|
.format(self.reg.name))
|
||||||
|
self.name = self.reg.name
|
||||||
|
|
||||||
|
self.regwen_multi = check_bool(rd.get('regwen_multi', False),
|
||||||
|
'regwen_multi field of multireg {}'
|
||||||
|
.format(self.reg.name))
|
||||||
|
|
||||||
|
        default_compact = len(self.reg.fields) == 1
|
||||||
|
self.compact = check_bool(rd.get('compact', default_compact),
|
||||||
|
'compact field of multireg {}'
|
||||||
|
.format(self.reg.name))
|
||||||
|
if self.compact and len(self.reg.fields) > 1:
|
||||||
|
raise ValueError('Multireg {} sets the compact flag '
|
||||||
|
'but has multiple fields.'
|
||||||
|
.format(self.reg.name))
|
||||||
|
|
||||||
|
count_str = check_str(rd['count'],
|
||||||
|
'count field of multireg {}'
|
||||||
|
.format(self.reg.name))
|
||||||
|
self.count = params.expand(count_str,
|
||||||
|
'count field of multireg ' + self.reg.name)
|
||||||
|
if self.count <= 0:
|
||||||
|
raise ValueError("Multireg {} has a count of {}, "
|
||||||
|
"which isn't positive."
|
||||||
|
.format(self.reg.name, self.count))
|
||||||
|
|
||||||
|
# Generate the registers that this multireg expands into. Here, a
|
||||||
|
# "creg" is a "compacted register", which might contain multiple actual
|
||||||
|
# registers.
|
||||||
|
if self.compact:
|
||||||
|
assert len(self.reg.fields) == 1
|
||||||
|
width_per_reg = self.reg.fields[0].bits.msb + 1
|
||||||
|
assert width_per_reg <= reg_width
|
||||||
|
regs_per_creg = reg_width // width_per_reg
|
||||||
|
else:
|
||||||
|
regs_per_creg = 1
|
||||||
|
|
||||||
|
self.regs = []
|
||||||
|
creg_count = (self.count + regs_per_creg - 1) // regs_per_creg
|
||||||
|
for creg_idx in range(creg_count):
|
||||||
|
min_reg_idx = regs_per_creg * creg_idx
|
||||||
|
max_reg_idx = min(min_reg_idx + regs_per_creg, self.count) - 1
|
||||||
|
creg_offset = offset + creg_idx * addrsep
|
||||||
|
|
||||||
|
reg = self.reg.make_multi(reg_width,
|
||||||
|
creg_offset, creg_idx, creg_count,
|
||||||
|
self.regwen_multi, self.compact,
|
||||||
|
min_reg_idx, max_reg_idx, self.cname)
|
||||||
|
self.regs.append(reg)
|
||||||
|
|
||||||
|
# dv_compact is true if the multireg can be equally divided, and we can
|
||||||
|
# pack them as an array
|
||||||
|
if self.count < regs_per_creg or (self.count % regs_per_creg) == 0:
|
||||||
|
self.dv_compact = True
|
||||||
|
else:
|
||||||
|
self.dv_compact = False
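
        # Illustrative sketch, not part of this commit: with reg_width = 32
        # and a single 4-bit field, width_per_reg is 4 and regs_per_creg is
        # 32 // 4 = 8. A count of 8 then compacts into one physical register
        # with dv_compact True, while a count of 12 needs two registers and
        # dv_compact is False because the second register is only half used.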
|
||||||
|
|
||||||
|
def next_offset(self, addrsep: int) -> int:
|
||||||
|
return self.offset + len(self.regs) * addrsep
|
||||||
|
|
||||||
|
def get_n_bits(self, bittype: List[str] = ["q"]) -> int:
|
||||||
|
return sum(reg.get_n_bits(bittype) for reg in self.regs)
|
||||||
|
|
||||||
|
def get_field_list(self) -> List[Field]:
|
||||||
|
ret = []
|
||||||
|
for reg in self.regs:
|
||||||
|
ret += reg.get_field_list()
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def is_homogeneous(self) -> bool:
|
||||||
|
return self.reg.is_homogeneous()
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
rd = self.reg._asdict()
|
||||||
|
rd['count'] = str(self.count)
|
||||||
|
rd['cname'] = self.cname
|
||||||
|
rd['regwen_multi'] = str(self.regwen_multi)
|
||||||
|
rd['compact'] = str(self.compact)
|
||||||
|
|
||||||
|
return {'multireg': rd}
|
|
@@ -0,0 +1,384 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
import re
|
||||||
|
from collections.abc import MutableMapping
|
||||||
|
from typing import Dict, Iterator, List, Optional, Tuple
|
||||||
|
|
||||||
|
from .lib import check_keys, check_str, check_int, check_bool, check_list
|
||||||
|
|
||||||
|
REQUIRED_FIELDS = {
|
||||||
|
'name': ['s', "name of the item"],
|
||||||
|
}
|
||||||
|
|
||||||
|
OPTIONAL_FIELDS = {
|
||||||
|
'desc': ['s', "description of the item"],
|
||||||
|
'type': ['s', "item type. int by default"],
|
||||||
|
'default': ['s', "item default value"],
|
||||||
|
'local': ['pb', "to be localparam"],
|
||||||
|
'expose': ['pb', "to be exposed to top"],
|
||||||
|
'randcount': [
|
||||||
|
's', "number of bits to randomize in the parameter. 0 by default."
|
||||||
|
],
|
||||||
|
'randtype': ['s', "type of randomization to perform. none by default"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class BaseParam:
|
||||||
|
def __init__(self, name: str, desc: Optional[str], param_type: str):
|
||||||
|
self.name = name
|
||||||
|
self.desc = desc
|
||||||
|
self.param_type = param_type
|
||||||
|
|
||||||
|
def apply_default(self, value: str) -> None:
|
||||||
|
if self.param_type[:3] == 'int':
|
||||||
|
check_int(value,
|
||||||
|
'default value for parameter {} '
|
||||||
|
'(which has type {})'
|
||||||
|
.format(self.name, self.param_type))
|
||||||
|
self.default = value
|
||||||
|
|
||||||
|
def as_dict(self) -> Dict[str, object]:
|
||||||
|
rd = {} # type: Dict[str, object]
|
||||||
|
rd['name'] = self.name
|
||||||
|
if self.desc is not None:
|
||||||
|
rd['desc'] = self.desc
|
||||||
|
rd['type'] = self.param_type
|
||||||
|
return rd
|
||||||
|
|
||||||
|
|
||||||
|
class LocalParam(BaseParam):
|
||||||
|
def __init__(self,
|
||||||
|
name: str,
|
||||||
|
desc: Optional[str],
|
||||||
|
param_type: str,
|
||||||
|
value: str):
|
||||||
|
super().__init__(name, desc, param_type)
|
||||||
|
self.value = value
|
||||||
|
|
||||||
|
def expand_value(self, when: str) -> int:
|
||||||
|
try:
|
||||||
|
return int(self.value, 0)
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError("When {}, the {} value expanded as "
|
||||||
|
"{}, which doesn't parse as an integer."
|
||||||
|
.format(when, self.name, self.value)) from None
|
||||||
|
|
||||||
|
def as_dict(self) -> Dict[str, object]:
|
||||||
|
rd = super().as_dict()
|
||||||
|
rd['local'] = True
|
||||||
|
rd['default'] = self.value
|
||||||
|
return rd
|
||||||
|
|
||||||
|
|
||||||
|
class Parameter(BaseParam):
|
||||||
|
def __init__(self,
|
||||||
|
name: str,
|
||||||
|
desc: Optional[str],
|
||||||
|
param_type: str,
|
||||||
|
default: str,
|
||||||
|
expose: bool):
|
||||||
|
super().__init__(name, desc, param_type)
|
||||||
|
self.default = default
|
||||||
|
self.expose = expose
|
||||||
|
|
||||||
|
def as_dict(self) -> Dict[str, object]:
|
||||||
|
rd = super().as_dict()
|
||||||
|
rd['default'] = self.default
|
||||||
|
rd['expose'] = 'true' if self.expose else 'false'
|
||||||
|
return rd
|
||||||
|
|
||||||
|
|
||||||
|
class RandParameter(BaseParam):
|
||||||
|
def __init__(self,
|
||||||
|
name: str,
|
||||||
|
desc: Optional[str],
|
||||||
|
param_type: str,
|
||||||
|
randcount: int,
|
||||||
|
randtype: str):
|
||||||
|
assert randcount > 0
|
||||||
|
assert randtype in ['perm', 'data']
|
||||||
|
super().__init__(name, desc, param_type)
|
||||||
|
self.randcount = randcount
|
||||||
|
self.randtype = randtype
|
||||||
|
|
||||||
|
def apply_default(self, value: str) -> None:
|
||||||
|
raise ValueError('Cannot apply a default value of {!r} to '
|
||||||
|
'parameter {}: it is a random netlist constant.'
|
||||||
|
                         .format(value, self.name))
|
||||||
|
|
||||||
|
def as_dict(self) -> Dict[str, object]:
|
||||||
|
rd = super().as_dict()
|
||||||
|
rd['randcount'] = self.randcount
|
||||||
|
rd['randtype'] = self.randtype
|
||||||
|
return rd
|
||||||
|
|
||||||
|
|
||||||
|
class MemSizeParameter(BaseParam):
|
||||||
|
def __init__(self,
|
||||||
|
name: str,
|
||||||
|
desc: Optional[str],
|
||||||
|
param_type: str):
|
||||||
|
super().__init__(name, desc, param_type)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_parameter(where: str, raw: object) -> BaseParam:
|
||||||
|
rd = check_keys(raw, where,
|
||||||
|
list(REQUIRED_FIELDS.keys()),
|
||||||
|
list(OPTIONAL_FIELDS.keys()))
|
||||||
|
|
||||||
|
# TODO: Check if PascalCase or ALL_CAPS
|
||||||
|
name = check_str(rd['name'], 'name field of ' + where)
|
||||||
|
|
||||||
|
r_desc = rd.get('desc')
|
||||||
|
if r_desc is None:
|
||||||
|
desc = None
|
||||||
|
else:
|
||||||
|
desc = check_str(r_desc, 'desc field of ' + where)
|
||||||
|
|
||||||
|
    # TODO: We should probably check that any parameter called RndCnstFoo has
|
||||||
|
# randtype and randcount.
|
||||||
|
if name.lower().startswith('rndcnst') and 'randtype' in rd:
|
||||||
|
# This is a random netlist constant and should be parsed as a
|
||||||
|
# RandParameter.
|
||||||
|
randtype = check_str(rd.get('randtype', 'none'),
|
||||||
|
'randtype field of ' + where)
|
||||||
|
if randtype not in ['perm', 'data']:
|
||||||
|
raise ValueError('At {}, parameter {} has a name that implies it '
|
||||||
|
'is a random netlist constant, which means it '
|
||||||
|
'must specify a randtype of "perm" or "data", '
|
||||||
|
'rather than {!r}.'
|
||||||
|
.format(where, name, randtype))
|
||||||
|
|
||||||
|
r_randcount = rd.get('randcount')
|
||||||
|
if r_randcount is None:
|
||||||
|
raise ValueError('At {}, the random netlist constant {} has no '
|
||||||
|
'randcount field.'
|
||||||
|
.format(where, name))
|
||||||
|
randcount = check_int(r_randcount, 'randcount field of ' + where)
|
||||||
|
if randcount <= 0:
|
||||||
|
raise ValueError('At {}, the random netlist constant {} has a '
|
||||||
|
'randcount of {}, which is not positive.'
|
||||||
|
.format(where, name, randcount))
|
||||||
|
|
||||||
|
r_type = rd.get('type')
|
||||||
|
if r_type is None:
|
||||||
|
raise ValueError('At {}, parameter {} has no type field (which is '
|
||||||
|
'required for random netlist constants).'
|
||||||
|
.format(where, name))
|
||||||
|
param_type = check_str(r_type, 'type field of ' + where)
|
||||||
|
|
||||||
|
local = check_bool(rd.get('local', 'false'), 'local field of ' + where)
|
||||||
|
if local:
|
||||||
|
raise ValueError('At {}, the parameter {} specifies local = true, '
|
||||||
|
'meaning that it is a localparam. This is '
|
||||||
|
'incompatible with being a random netlist '
|
||||||
|
'constant (how would it be set?)'
|
||||||
|
.format(where, name))
|
||||||
|
|
||||||
|
r_default = rd.get('default')
|
||||||
|
if r_default is not None:
|
||||||
|
raise ValueError('At {}, the parameter {} specifies a value for '
|
||||||
|
'the "default" field. This is incompatible with '
|
||||||
|
'being a random netlist constant: the value will '
|
||||||
|
'be set by the random generator.'
|
||||||
|
.format(where, name))
|
||||||
|
|
||||||
|
expose = check_bool(rd.get('expose', 'false'),
|
||||||
|
'expose field of ' + where)
|
||||||
|
if expose:
|
||||||
|
raise ValueError('At {}, the parameter {} specifies expose = '
|
||||||
|
'true, meaning that the parameter is exposed to '
|
||||||
|
'the top-level. This is incompatible with being '
|
||||||
|
'a random netlist constant.'
|
||||||
|
.format(where, name))
|
||||||
|
|
||||||
|
return RandParameter(name, desc, param_type, randcount, randtype)
|
||||||
|
|
||||||
|
# This doesn't have a name like a random netlist constant. Check that it
|
||||||
|
# doesn't define randcount or randtype.
|
||||||
|
for fld in ['randcount', 'randtype']:
|
||||||
|
if fld in rd:
|
||||||
|
raise ValueError("At {where}, the parameter {name} specifies "
|
||||||
|
"{fld} but the name doesn't look like a random "
|
||||||
|
"netlist constant. To use {fld}, prefix the name "
|
||||||
|
"with RndCnst."
|
||||||
|
.format(where=where, name=name, fld=fld))
|
||||||
|
|
||||||
|
if name.lower().startswith('memsize'):
|
||||||
|
r_type = rd.get('type')
|
||||||
|
if r_type is None:
|
||||||
|
raise ValueError('At {}, parameter {} has no type field (which is '
|
||||||
|
'required for memory size parameters).'
|
||||||
|
.format(where, name))
|
||||||
|
param_type = check_str(r_type, 'type field of ' + where)
|
||||||
|
|
||||||
|
if rd.get('type') != "int":
|
||||||
|
raise ValueError('At {}, memory size parameter {} must be of type integer.'
|
||||||
|
.format(where, name))
|
||||||
|
|
||||||
|
local = check_bool(rd.get('local', 'false'), 'local field of ' + where)
|
||||||
|
if local:
|
||||||
|
raise ValueError('At {}, the parameter {} specifies local = true, '
|
||||||
|
'meaning that it is a localparam. This is '
|
||||||
|
'incompatible with being a memory size parameter.'
|
||||||
|
.format(where, name))
|
||||||
|
|
||||||
|
expose = check_bool(rd.get('expose', 'false'),
|
||||||
|
'expose field of ' + where)
|
||||||
|
if expose:
|
||||||
|
raise ValueError('At {}, the parameter {} specifies expose = '
|
||||||
|
'true, meaning that the parameter is exposed to '
|
||||||
|
'the top-level. This is incompatible with '
|
||||||
|
'being a memory size parameter.'
|
||||||
|
.format(where, name))
|
||||||
|
|
||||||
|
return MemSizeParameter(name, desc, param_type)
|
||||||
|
|
||||||
|
r_type = rd.get('type')
|
||||||
|
if r_type is None:
|
||||||
|
param_type = 'int'
|
||||||
|
else:
|
||||||
|
param_type = check_str(r_type, 'type field of ' + where)
|
||||||
|
|
||||||
|
local = check_bool(rd.get('local', 'true'), 'local field of ' + where)
|
||||||
|
expose = check_bool(rd.get('expose', 'false'), 'expose field of ' + where)
|
||||||
|
|
||||||
|
r_default = rd.get('default')
|
||||||
|
if r_default is None:
|
||||||
|
raise ValueError('At {}, the {} param has no default field.'
|
||||||
|
.format(where, name))
|
||||||
|
else:
|
||||||
|
default = check_str(r_default, 'default field of ' + where)
|
||||||
|
if param_type[:3] == 'int':
|
||||||
|
check_int(default,
|
||||||
|
                      'default field of {} (an integer parameter)'
|
||||||
|
.format(name))
|
||||||
|
|
||||||
|
if local:
|
||||||
|
if expose:
|
||||||
|
raise ValueError('At {}, the localparam {} cannot be exposed to '
|
||||||
|
'the top-level.'
|
||||||
|
.format(where, name))
|
||||||
|
return LocalParam(name, desc, param_type, value=default)
|
||||||
|
else:
|
||||||
|
return Parameter(name, desc, param_type, default, expose)
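

# Illustrative sketch, not part of this commit (field values are made up):
#
#     _parse_parameter('example entry',
#                      {'name': 'NumAlerts', 'type': 'int', 'default': '2'})
#
# yields a LocalParam, because 'local' defaults to true here. A name starting
# with RndCnst together with randtype 'perm' or 'data' and a positive
# randcount yields a RandParameter instead, and a name starting with MemSize
# (of type int) yields a MemSizeParameter.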
|
||||||
|
|
||||||
|
|
||||||
|
# Note: With a modern enough Python, we'd like this to derive from
|
||||||
|
# "MutableMapping[str, BaseParam]". Unfortunately, this doesn't work with
|
||||||
|
# Python 3.6 (where collections.abc.MutableMapping isn't subscriptable).
|
||||||
|
# So we derive from just "MutableMapping" and tell mypy not to worry
|
||||||
|
# about it.
|
||||||
|
class Params(MutableMapping): # type: ignore
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self.by_name = {} # type: Dict[str, BaseParam]
|
||||||
|
|
||||||
|
def __getitem__(self, key: str) -> BaseParam:
|
||||||
|
return self.by_name[key]
|
||||||
|
|
||||||
|
def __delitem__(self, key: str) -> None:
|
||||||
|
del self.by_name[key]
|
||||||
|
|
||||||
|
def __setitem__(self, key: str, value: BaseParam) -> None:
|
||||||
|
self.by_name[key] = value
|
||||||
|
|
||||||
|
def __iter__(self) -> Iterator[str]:
|
||||||
|
return iter(self.by_name)
|
||||||
|
|
||||||
|
def __len__(self) -> int:
|
||||||
|
return len(self.by_name)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"{type(self).__name__}({self.by_name})"
|
||||||
|
|
||||||
|
def add(self, param: BaseParam) -> None:
|
||||||
|
assert param.name not in self.by_name
|
||||||
|
self.by_name[param.name] = param
|
||||||
|
|
||||||
|
def apply_defaults(self, defaults: List[Tuple[str, str]]) -> None:
|
||||||
|
for idx, (key, value) in enumerate(defaults):
|
||||||
|
            param = self.by_name.get(key)
|
||||||
|
if param is None:
|
||||||
|
raise KeyError('Cannot find parameter '
|
||||||
|
'{} to set default value.'
|
||||||
|
.format(key))
|
||||||
|
|
||||||
|
param.apply_default(value)
|
||||||
|
|
||||||
|
def _expand_one(self, value: str, when: str) -> int:
|
||||||
|
# Check whether value is already an integer: if so, return that.
|
||||||
|
try:
|
||||||
|
return int(value, 0)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
param = self.by_name.get(value)
|
||||||
|
if param is None:
|
||||||
|
raise ValueError('Cannot find a parameter called {} when {}. '
|
||||||
|
'Known parameters: {}.'
|
||||||
|
.format(value,
|
||||||
|
when,
|
||||||
|
', '.join(self.by_name.keys())))
|
||||||
|
|
||||||
|
# Only allow localparams in the expansion (because otherwise we're at
|
||||||
|
# the mercy of whatever instantiates the block).
|
||||||
|
if not isinstance(param, LocalParam):
|
||||||
|
raise ValueError("When {}, {} is a not a local parameter."
|
||||||
|
.format(when, value))
|
||||||
|
|
||||||
|
return param.expand_value(when)
|
||||||
|
|
||||||
|
def expand(self, value: str, where: str) -> int:
|
||||||
|
# Here, we want to support arithmetic expressions with + and -. We
|
||||||
|
# don't support other operators, or parentheses (so can parse with just
|
||||||
|
# a regex).
|
||||||
|
#
|
||||||
|
# Use re.split, capturing the operators. This turns e.g. "a + b-c" into
|
||||||
|
# ['a ', '+', ' b', '-', 'c']. If there's a leading operator ("+a"),
|
||||||
|
# the first element of the results is an empty string. This means
|
||||||
|
# elements with odd positions are always operators and elements with
|
||||||
|
# even positions are values.
|
||||||
|
acc = 0
|
||||||
|
is_neg = False
|
||||||
|
|
||||||
|
for idx, tok in enumerate(re.split(r'([+-])', value)):
|
||||||
|
if idx == 0 and not tok:
|
||||||
|
continue
|
||||||
|
if idx % 2:
|
||||||
|
is_neg = (tok == '-')
|
||||||
|
continue
|
||||||
|
|
||||||
|
term = self._expand_one(tok.strip(),
|
||||||
|
'expanding term {} of {}'
|
||||||
|
.format(idx // 2, where))
|
||||||
|
acc += -term if is_neg else term
|
||||||
|
|
||||||
|
return acc
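
    # Illustrative sketch, not part of this commit: assuming the block's
    # param_list defines localparams NumTargets = 4 and NumSources = 2
    # (made-up names), params.expand('NumTargets + NumSources - 1', 'example')
    # evaluates to 5, and a plain integer string such as '0x10' expands to 16
    # without any parameter lookup.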
|
||||||
|
|
||||||
|
def as_dicts(self) -> List[Dict[str, object]]:
|
||||||
|
return [p.as_dict() for p in self.by_name.values()]
|
||||||
|
|
||||||
|
|
||||||
|
class ReggenParams(Params):
|
||||||
|
@staticmethod
|
||||||
|
def from_raw(where: str, raw: object) -> 'ReggenParams':
|
||||||
|
ret = ReggenParams()
|
||||||
|
rl = check_list(raw, where)
|
||||||
|
for idx, r_param in enumerate(rl):
|
||||||
|
entry_where = 'entry {} in {}'.format(idx + 1, where)
|
||||||
|
param = _parse_parameter(entry_where, r_param)
|
||||||
|
if param.name in ret:
|
||||||
|
raise ValueError('At {}, found a duplicate parameter with '
|
||||||
|
'name {}.'
|
||||||
|
.format(entry_where, param.name))
|
||||||
|
ret.add(param)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def get_localparams(self) -> List[LocalParam]:
|
||||||
|
ret = []
|
||||||
|
for param in self.by_name.values():
|
||||||
|
if isinstance(param, LocalParam):
|
||||||
|
ret.append(param)
|
||||||
|
return ret
|
|
@@ -0,0 +1,45 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

from typing import List

from .field import Field


class RegBase:
    '''An abstract class inherited by Register and MultiRegister

    This represents a block of one or more registers with a base address.

    '''
    def __init__(self, offset: int):
        self.offset = offset

    def get_n_bits(self, bittype: List[str]) -> int:
        '''Get the size of this register / these registers in bits

        See Field.get_n_bits() for the precise meaning of bittype.

        '''
        raise NotImplementedError()

    def get_field_list(self) -> List[Field]:
        '''Get an ordered list of the fields in the register(s)

        Registers are ordered from low to high address. Within a register,
        fields are ordered as Register.fields: from LSB to MSB.

        '''
        raise NotImplementedError()

    def is_homogeneous(self) -> bool:
        '''True if every field in the block is identical

        For a single register, this is true if it only has one field. For a
        multireg, it is true if the generating register has just one field.
        Note that if the compact flag is set, the generated registers might
        have multiple (replicated) fields.

        '''
        raise NotImplementedError()
@@ -0,0 +1,429 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
'''Code representing the registers, windows etc. for a block'''
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Callable, Dict, List, Optional, Sequence, Union
|
||||||
|
|
||||||
|
from .alert import Alert
|
||||||
|
from .access import SWAccess, HWAccess
|
||||||
|
from .field import Field
|
||||||
|
from .signal import Signal
|
||||||
|
from .lib import check_int, check_list, check_str_dict, check_str
|
||||||
|
from .multi_register import MultiRegister
|
||||||
|
from .params import ReggenParams
|
||||||
|
from .register import Register
|
||||||
|
from .window import Window
|
||||||
|
|
||||||
|
|
||||||
|
class RegBlock:
|
||||||
|
def __init__(self, reg_width: int, params: ReggenParams):
|
||||||
|
|
||||||
|
self._addrsep = (reg_width + 7) // 8
|
||||||
|
self._reg_width = reg_width
|
||||||
|
self._params = params
|
||||||
|
|
||||||
|
self.offset = 0
|
||||||
|
self.multiregs = [] # type: List[MultiRegister]
|
||||||
|
self.registers = [] # type: List[Register]
|
||||||
|
self.windows = [] # type: List[Window]
|
||||||
|
|
||||||
|
        # True if any window in the regblock has data integrity passthrough
|
||||||
|
self.has_data_intg_passthru = False
|
||||||
|
|
||||||
|
# A list of all registers, expanding multiregs, ordered by offset
|
||||||
|
self.flat_regs = [] # type: List[Register]
|
||||||
|
|
||||||
|
# A list of registers and multiregisters (unexpanded)
|
||||||
|
self.all_regs = [] # type: List[Union[Register, MultiRegister]]
|
||||||
|
|
||||||
|
# A list of all the underlying register types used in the block. This
|
||||||
|
# has one entry for each actual Register, plus a single entry giving
|
||||||
|
# the underlying register for each MultiRegister.
|
||||||
|
self.type_regs = [] # type: List[Register]
|
||||||
|
|
||||||
|
# A list with everything in order
|
||||||
|
self.entries = [] # type: List[object]
|
||||||
|
|
||||||
|
# A dict of named entries, mapping name to offset
|
||||||
|
self.name_to_offset = {} # type: Dict[str, int]
|
||||||
|
|
||||||
|
# A dict of all registers (expanding multiregs), mapping name to the
|
||||||
|
# register object
|
||||||
|
self.name_to_flat_reg = {} # type: Dict[str, Register]
|
||||||
|
|
||||||
|
# A list of all write enable names
|
||||||
|
self.wennames = [] # type: List[str]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build_blocks(block: 'RegBlock',
|
||||||
|
raw: object) -> Dict[Optional[str], 'RegBlock']:
|
||||||
|
'''Build a dictionary of blocks for a 'registers' field in the hjson
|
||||||
|
|
||||||
|
There are two different syntaxes we might see here. The simple syntax
|
||||||
|
just consists of a list of entries (register, multireg, window,
|
||||||
|
skipto). If we see that, each entry gets added to init_block and then
|
||||||
|
we return {None: init_block}.
|
||||||
|
|
||||||
|
The more complicated syntax is a dictionary. This parses from hjson as
|
||||||
|
an OrderedDict which we walk in document order. Entries from the first
|
||||||
|
key/value pair in the dictionary will be added to init_block. Later
|
||||||
|
key/value pairs start empty RegBlocks. The return value is a dictionary
|
||||||
|
mapping the keys we saw to their respective RegBlocks.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if isinstance(raw, list):
|
||||||
|
# This is the simple syntax
|
||||||
|
block.add_raw_registers(raw, 'registers field at top-level')
|
||||||
|
return {None: block}
|
||||||
|
|
||||||
|
# This is the more complicated syntax
|
||||||
|
if not isinstance(raw, dict):
|
||||||
|
raise ValueError('registers field at top-level is '
|
||||||
|
                             'neither a list nor a dictionary.')
|
||||||
|
|
||||||
|
ret = {} # type: Dict[Optional[str], RegBlock]
|
||||||
|
for idx, (r_key, r_val) in enumerate(raw.items()):
|
||||||
|
if idx > 0:
|
||||||
|
block = RegBlock(block._reg_width, block._params)
|
||||||
|
|
||||||
|
rb_key = check_str(r_key,
|
||||||
|
'the key for item {} of '
|
||||||
|
'the registers dictionary at top-level'
|
||||||
|
.format(idx + 1))
|
||||||
|
rb_val = check_list(r_val,
|
||||||
|
'the value for item {} of '
|
||||||
|
'the registers dictionary at top-level'
|
||||||
|
.format(idx + 1))
|
||||||
|
|
||||||
|
block.add_raw_registers(rb_val,
|
||||||
|
'item {} of the registers '
|
||||||
|
'dictionary at top-level'
|
||||||
|
.format(idx + 1))
|
||||||
|
block.validate()
|
||||||
|
|
||||||
|
assert rb_key not in ret
|
||||||
|
ret[rb_key] = block
|
||||||
|
|
||||||
|
return ret
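
    # Illustrative sketch, not part of this commit (names are made up): the
    # simple syntax
    #
    #     registers: [ { name: "CTRL", ... }, ... ]
    #
    # produces {None: block}, while the dictionary syntax
    #
    #     registers: { core: [ ... ], fifo: [ ... ] }
    #
    # produces one RegBlock per key, here {'core': ..., 'fifo': ...}.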
|
||||||
|
|
||||||
|
def add_raw_registers(self, raw: object, what: str) -> None:
|
||||||
|
rl = check_list(raw, 'registers field at top-level')
|
||||||
|
for entry_idx, entry_raw in enumerate(rl):
|
||||||
|
where = ('entry {} of the top-level registers field'
|
||||||
|
.format(entry_idx + 1))
|
||||||
|
self.add_raw(where, entry_raw)
|
||||||
|
|
||||||
|
def add_raw(self, where: str, raw: object) -> None:
|
||||||
|
entry = check_str_dict(raw, where)
|
||||||
|
|
||||||
|
handlers = {
|
||||||
|
'register': self._handle_register,
|
||||||
|
'reserved': self._handle_reserved,
|
||||||
|
'skipto': self._handle_skipto,
|
||||||
|
'window': self._handle_window,
|
||||||
|
'multireg': self._handle_multireg
|
||||||
|
}
|
||||||
|
|
||||||
|
entry_type = 'register'
|
||||||
|
entry_body = entry # type: object
|
||||||
|
|
||||||
|
for t in ['reserved', 'skipto', 'window', 'multireg']:
|
||||||
|
t_body = entry.get(t)
|
||||||
|
if t_body is not None:
|
||||||
|
# Special entries look like { window: { ... } }, so if we
|
||||||
|
# get a hit, this should be the only key in entry. Note
|
||||||
|
# that this also checks that nothing has more than one
|
||||||
|
# entry type.
|
||||||
|
if len(entry) != 1:
|
||||||
|
other_keys = [k for k in entry if k != t]
|
||||||
|
assert other_keys
|
||||||
|
raise ValueError('At offset {:#x}, {} has key {}, which '
|
||||||
|
'should give its type. But it also has '
|
||||||
|
'other keys too: {}.'
|
||||||
|
.format(self.offset,
|
||||||
|
where, t, ', '.join(other_keys)))
|
||||||
|
entry_type = t
|
||||||
|
entry_body = t_body
|
||||||
|
|
||||||
|
entry_where = ('At offset {:#x}, {}, type {!r}'
|
||||||
|
.format(self.offset, where, entry_type))
|
||||||
|
|
||||||
|
handlers[entry_type](entry_where, entry_body)
|
||||||
|
|
||||||
|
def _handle_register(self, where: str, body: object) -> None:
|
||||||
|
reg = Register.from_raw(self._reg_width,
|
||||||
|
self.offset, self._params, body)
|
||||||
|
self.add_register(reg)
|
||||||
|
|
||||||
|
def _handle_reserved(self, where: str, body: object) -> None:
|
||||||
|
nreserved = check_int(body, 'body of ' + where)
|
||||||
|
if nreserved <= 0:
|
||||||
|
raise ValueError('Reserved count in {} is {}, '
|
||||||
|
'which is not positive.'
|
||||||
|
.format(where, nreserved))
|
||||||
|
|
||||||
|
self.offset += self._addrsep * nreserved
|
||||||
|
|
||||||
|
def _handle_skipto(self, where: str, body: object) -> None:
|
||||||
|
skipto = check_int(body, 'body of ' + where)
|
||||||
|
if skipto < self.offset:
|
||||||
|
raise ValueError('Destination of skipto in {} is {:#x}, '
|
||||||
|
                             'which is less than the current offset, {:#x}.'
|
||||||
|
.format(where, skipto, self.offset))
|
||||||
|
if skipto % self._addrsep:
|
||||||
|
raise ValueError('Destination of skipto in {} is {:#x}, '
|
||||||
|
'not a multiple of addrsep, {:#x}.'
|
||||||
|
.format(where, skipto, self._addrsep))
|
||||||
|
self.offset = skipto
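
    # Illustrative sketch, not part of this commit: with a 32-bit reg_width the
    # address separation is 4 bytes, so an entry like { reserved: "4" } moves
    # the running offset forward by 16 bytes, and { skipto: "0x400" } jumps it
    # straight to 0x400, provided that is not behind the current offset and is
    # a multiple of 4.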
|
||||||
|
|
||||||
|
def _handle_window(self, where: str, body: object) -> None:
|
||||||
|
window = Window.from_raw(self.offset,
|
||||||
|
self._reg_width, self._params, body)
|
||||||
|
if window.name is not None:
|
||||||
|
lname = window.name.lower()
|
||||||
|
if lname in self.name_to_offset:
|
||||||
|
raise ValueError('Window {} (at offset {:#x}) has the '
|
||||||
|
'same name as something at offset {:#x}.'
|
||||||
|
.format(window.name, window.offset,
|
||||||
|
self.name_to_offset[lname]))
|
||||||
|
self.add_window(window)
|
||||||
|
|
||||||
|
def _handle_multireg(self, where: str, body: object) -> None:
|
||||||
|
mr = MultiRegister(self.offset,
|
||||||
|
self._addrsep, self._reg_width, self._params, body)
|
||||||
|
for reg in mr.regs:
|
||||||
|
lname = reg.name.lower()
|
||||||
|
if lname in self.name_to_offset:
|
||||||
|
raise ValueError('Multiregister {} (at offset {:#x}) expands '
|
||||||
|
'to a register with name {} (at offset '
|
||||||
|
'{:#x}), but this already names something at '
|
||||||
|
'offset {:#x}.'
|
||||||
|
.format(mr.reg.name, mr.reg.offset,
|
||||||
|
reg.name, reg.offset,
|
||||||
|
self.name_to_offset[lname]))
|
||||||
|
self._add_flat_reg(reg)
|
||||||
|
if mr.dv_compact is False:
|
||||||
|
self.type_regs.append(reg)
|
||||||
|
self.name_to_offset[lname] = reg.offset
|
||||||
|
|
||||||
|
self.multiregs.append(mr)
|
||||||
|
self.all_regs.append(mr)
|
||||||
|
if mr.dv_compact is True:
|
||||||
|
self.type_regs.append(mr.reg)
|
||||||
|
self.entries.append(mr)
|
||||||
|
self.offset = mr.next_offset(self._addrsep)
|
||||||
|
|
||||||
|
def add_register(self, reg: Register) -> None:
|
||||||
|
assert reg.offset == self.offset
|
||||||
|
|
||||||
|
lname = reg.name.lower()
|
||||||
|
if lname in self.name_to_offset:
|
||||||
|
raise ValueError('Register {} (at offset {:#x}) has the same '
|
||||||
|
'name as something at offset {:#x}.'
|
||||||
|
.format(reg.name, reg.offset,
|
||||||
|
self.name_to_offset[lname]))
|
||||||
|
self._add_flat_reg(reg)
|
||||||
|
self.name_to_offset[lname] = reg.offset
|
||||||
|
|
||||||
|
self.registers.append(reg)
|
||||||
|
self.all_regs.append(reg)
|
||||||
|
self.type_regs.append(reg)
|
||||||
|
self.entries.append(reg)
|
||||||
|
self.offset = reg.next_offset(self._addrsep)
|
||||||
|
|
||||||
|
if reg.regwen is not None and reg.regwen not in self.wennames:
|
||||||
|
self.wennames.append(reg.regwen)
|
||||||
|
|
||||||
|
def _add_flat_reg(self, reg: Register) -> None:
|
||||||
|
# The first assertion is checked at the call site (where we can print
|
||||||
|
# out a nicer message for multiregs). The second assertion should be
|
||||||
|
# implied by the first.
|
||||||
|
assert reg.name not in self.name_to_offset
|
||||||
|
assert reg.name not in self.name_to_flat_reg
|
||||||
|
|
||||||
|
self.flat_regs.append(reg)
|
||||||
|
self.name_to_flat_reg[reg.name.lower()] = reg
|
||||||
|
|
||||||
|
def add_window(self, window: Window) -> None:
|
||||||
|
if window.name is not None:
|
||||||
|
lname = window.name.lower()
|
||||||
|
assert lname not in self.name_to_offset
|
||||||
|
self.name_to_offset[lname] = window.offset
|
||||||
|
|
||||||
|
self.windows.append(window)
|
||||||
|
self.entries.append(window)
|
||||||
|
assert self.offset <= window.offset
|
||||||
|
self.offset = window.next_offset(self._addrsep)
|
||||||
|
|
||||||
|
self.has_data_intg_passthru |= window.data_intg_passthru
|
||||||
|
|
||||||
|
def validate(self) -> None:
|
||||||
|
'''Run this to check consistency after all registers have been added'''
|
||||||
|
|
||||||
|
# Check that every write-enable register has a good name, a valid reset
|
||||||
|
# value, and valid access permissions.
|
||||||
|
for wenname in self.wennames:
|
||||||
|
# check the REGWEN naming convention
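# e.g. FOO_REGWEN, FOO_BAR_REGWEN_0 and plain REGWEN all match;
# FOO_WEN does not.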
|
||||||
|
if re.fullmatch(r'(.+_)*REGWEN(_[0-9]+)?', wenname) is None:
|
||||||
|
raise ValueError("Regwen name {} must have the suffix '_REGWEN'"
|
||||||
|
.format(wenname))
|
||||||
|
|
||||||
|
wen_reg = self.name_to_flat_reg.get(wenname.lower())
|
||||||
|
if wen_reg is None:
|
||||||
|
raise ValueError('One or more registers use {} as a '
|
||||||
|
'write-enable, but there is no such register.'
|
||||||
|
.format(wenname))
|
||||||
|
|
||||||
|
wen_reg.check_valid_regwen()
|
||||||
|
|
||||||
|
def get_n_bits(self, bittype: List[str] = ["q"]) -> int:
|
||||||
|
'''Returns number of bits in registers in this block.
|
||||||
|
|
||||||
|
This includes those expanded from multiregs. See Field.get_n_bits for a
|
||||||
|
description of the bittype argument.
|
||||||
|
|
||||||
|
'''
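# For example, get_n_bits(["q", "qe"]) counts the q data bits plus any
# qe enable bits across all of the flat registers.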
|
||||||
|
return sum(reg.get_n_bits(bittype) for reg in self.flat_regs)
|
||||||
|
|
||||||
|
def as_dicts(self) -> List[object]:
|
||||||
|
entries = [] # type: List[object]
|
||||||
|
offset = 0
|
||||||
|
for entry in self.entries:
|
||||||
|
assert (isinstance(entry, Register) or
|
||||||
|
isinstance(entry, MultiRegister) or
|
||||||
|
isinstance(entry, Window))
|
||||||
|
|
||||||
|
next_off = entry.offset
|
||||||
|
assert offset <= next_off
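# Any gap before this entry is emitted as a number of reserved
# addrsep-sized slots.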
|
||||||
|
res_bytes = next_off - offset
|
||||||
|
if res_bytes:
|
||||||
|
assert res_bytes % self._addrsep == 0
|
||||||
|
entries.append({'reserved': res_bytes // self._addrsep})
|
||||||
|
|
||||||
|
entries.append(entry)
|
||||||
|
offset = entry.next_offset(self._addrsep)
|
||||||
|
|
||||||
|
return entries
|
||||||
|
|
||||||
|
_FieldFormatter = Callable[[bool, str], str]
|
||||||
|
|
||||||
|
def _add_intr_alert_reg(self,
|
||||||
|
signals: Sequence[Signal],
|
||||||
|
reg_name: str,
|
||||||
|
reg_desc: str,
|
||||||
|
field_desc_fmt: Optional[Union[str, _FieldFormatter]],
|
||||||
|
swaccess: str,
|
||||||
|
hwaccess: str,
|
||||||
|
is_testreg: bool,
|
||||||
|
reg_tags: List[str]) -> None:
|
||||||
|
swaccess_obj = SWAccess('RegBlock._make_intr_alert_reg()', swaccess)
|
||||||
|
hwaccess_obj = HWAccess('RegBlock._make_intr_alert_reg()', hwaccess)
|
||||||
|
|
||||||
|
fields = []
|
||||||
|
for signal in signals:
|
||||||
|
if field_desc_fmt is None:
|
||||||
|
field_desc = signal.desc
|
||||||
|
elif isinstance(field_desc_fmt, str):
|
||||||
|
field_desc = field_desc_fmt
|
||||||
|
else:
|
||||||
|
width = signal.bits.width()
|
||||||
|
field_desc = field_desc_fmt(width > 1, signal.name)
|
||||||
|
|
||||||
|
fields.append(Field(signal.name,
|
||||||
|
field_desc or signal.desc,
|
||||||
|
tags=[],
|
||||||
|
swaccess=swaccess_obj,
|
||||||
|
hwaccess=hwaccess_obj,
|
||||||
|
bits=signal.bits,
|
||||||
|
resval=0,
|
||||||
|
enum=None))
|
||||||
|
|
||||||
|
reg = Register(self.offset,
|
||||||
|
reg_name,
|
||||||
|
reg_desc,
|
||||||
|
hwext=is_testreg,
|
||||||
|
hwqe=is_testreg,
|
||||||
|
hwre=False,
|
||||||
|
regwen=None,
|
||||||
|
tags=reg_tags,
|
||||||
|
resval=None,
|
||||||
|
shadowed=False,
|
||||||
|
fields=fields,
|
||||||
|
update_err_alert=None,
|
||||||
|
storage_err_alert=None)
|
||||||
|
self.add_register(reg)
|
||||||
|
|
||||||
|
def make_intr_regs(self, interrupts: Sequence[Signal]) -> None:
|
||||||
|
assert interrupts
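# All interrupt bits must fit into a single register: INTR_STATE,
# INTR_ENABLE and INTR_TEST are each one register of width _reg_width.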
|
||||||
|
assert interrupts[-1].bits.msb < self._reg_width
|
||||||
|
|
||||||
|
self._add_intr_alert_reg(interrupts,
|
||||||
|
'INTR_STATE',
|
||||||
|
'Interrupt State Register',
|
||||||
|
None,
|
||||||
|
'rw1c',
|
||||||
|
'hrw',
|
||||||
|
False,
|
||||||
|
# Some POR routines have the potential to
|
||||||
|
# unpredictably set some `intr_state` fields
|
||||||
|
# for various IPs, so we exclude all
|
||||||
|
# `intr_state` accesses from CSR checks to
|
||||||
|
# prevent this from occurring.
|
||||||
|
#
|
||||||
|
# An example of an `intr_state` mismatch error
|
||||||
|
# occurring due to a POR routine can be seen in
|
||||||
|
# issue #6888.
|
||||||
|
["excl:CsrAllTests:CsrExclAll"])
|
||||||
|
self._add_intr_alert_reg(interrupts,
|
||||||
|
'INTR_ENABLE',
|
||||||
|
'Interrupt Enable Register',
|
||||||
|
lambda w, n: ('Enable interrupt when '
|
||||||
|
'{}!!INTR_STATE.{} is set.'
|
||||||
|
.format('corresponding bit in '
|
||||||
|
if w else '',
|
||||||
|
n)),
|
||||||
|
'rw',
|
||||||
|
'hro',
|
||||||
|
False,
|
||||||
|
[])
|
||||||
|
self._add_intr_alert_reg(interrupts,
|
||||||
|
'INTR_TEST',
|
||||||
|
'Interrupt Test Register',
|
||||||
|
lambda w, n: ('Write 1 to force '
|
||||||
|
'{}!!INTR_STATE.{} to 1.'
|
||||||
|
.format('corresponding bit in '
|
||||||
|
if w else '',
|
||||||
|
n)),
|
||||||
|
'wo',
|
||||||
|
'hro',
|
||||||
|
True,
|
||||||
|
# intr_test csr is WO so reads back 0s
|
||||||
|
["excl:CsrNonInitTests:CsrExclWrite"])
|
||||||
|
|
||||||
|
def make_alert_regs(self, alerts: List[Alert]) -> None:
|
||||||
|
assert alerts
|
||||||
|
assert len(alerts) < self._reg_width
|
||||||
|
self._add_intr_alert_reg(alerts,
|
||||||
|
'ALERT_TEST',
|
||||||
|
'Alert Test Register',
|
||||||
|
('Write 1 to trigger '
|
||||||
|
'one alert event of this kind.'),
|
||||||
|
'wo',
|
||||||
|
'hro',
|
||||||
|
True,
|
||||||
|
[])
|
||||||
|
|
||||||
|
def get_addr_width(self) -> int:
|
||||||
|
'''Calculate the number of bits to address every byte of the block'''
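# For example, a block whose last entry ends at offset 0x40 needs
# (0x40 - 1).bit_length() = 6 address bits.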
|
||||||
|
return (self.offset - 1).bit_length()
|
||||||
|
|
||||||
|
def has_shadowed_reg(self) -> bool:
|
||||||
|
'''Return a boolean indicating whether the reg block contains shadowed registers'''
|
||||||
|
for r in self.flat_regs:
|
||||||
|
if r.shadowed:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
|
@ -0,0 +1,74 @@
|
||||||
|
/* Stylesheet for reggen HTML register output */
/* Copyright lowRISC contributors. */
/* Licensed under the Apache License, Version 2.0, see LICENSE for details. */
/* SPDX-License-Identifier: Apache-2.0 */

table.regpic {
  width: 95%;
  border-collapse: collapse;
  margin-left: auto;
  margin-right: auto;
  table-layout: fixed;
}

table.regdef {
  border: 1px solid black;
  width: 80%;
  border-collapse: collapse;
  margin-left: auto;
  margin-right: auto;
  table-layout: auto;
}

table.regdef th {
  border: 1px solid black;
  font-family: sans-serif;
}

td.bitnum {
  font-size: 60%;
  text-align: center;
}

td.unused {
  border: 1px solid black;
  background-color: gray;
}

td.fname {
  border: 1px solid black;
  text-align: center;
  font-family: sans-serif;
}

td.regbits, td.regperm, td.regrv {
  border: 1px solid black;
  text-align: center;
  font-family: sans-serif;
}

td.regde, td.regfn {
  border: 1px solid black;
}

table.cfgtable {
  border: 1px solid black;
  width: 80%;
  border-collapse: collapse;
  margin-left: auto;
  margin-right: auto;
  table-layout: auto;
}

table.cfgtable th {
  border: 1px solid black;
  font-family: sans-serif;
  font-weight: bold;
}

table.cfgtable td {
  border: 1px solid black;
  font-family: sans-serif;
}
|
|
@ -0,0 +1,392 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
//
|
||||||
|
// Register Package auto-generated by `reggen` containing data structure
|
||||||
|
<%
|
||||||
|
from topgen import lib # TODO: Split lib to common lib module
|
||||||
|
|
||||||
|
from reggen.access import HwAccess, SwRdAccess, SwWrAccess
|
||||||
|
from reggen.register import Register
|
||||||
|
from reggen.multi_register import MultiRegister
|
||||||
|
|
||||||
|
from reggen import gen_rtl
|
||||||
|
|
||||||
|
localparams = block.params.get_localparams()
|
||||||
|
|
||||||
|
addr_widths = gen_rtl.get_addr_widths(block)
|
||||||
|
|
||||||
|
lblock = block.name.lower()
|
||||||
|
ublock = lblock.upper()
|
||||||
|
|
||||||
|
def reg_pfx(reg):
|
||||||
|
return '{}_{}'.format(ublock, reg.name.upper())
|
||||||
|
|
||||||
|
def reg_resname(reg):
|
||||||
|
return '{}_RESVAL'.format(reg_pfx(reg))
|
||||||
|
|
||||||
|
def field_resname(reg, field):
|
||||||
|
return '{}_{}_RESVAL'.format(reg_pfx(reg), field.name.upper())
|
||||||
|
|
||||||
|
%>\
|
||||||
|
<%def name="typedefs_for_iface(iface_name, iface_desc, for_iface, rb)">\
|
||||||
|
<%
|
||||||
|
hdr = gen_rtl.make_box_quote('Typedefs for registers' + for_iface)
|
||||||
|
%>\
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
% if r.get_n_bits(["q"]):
|
||||||
|
% if hdr:
|
||||||
|
|
||||||
|
${hdr}
|
||||||
|
% endif
|
||||||
|
<%
|
||||||
|
r0 = gen_rtl.get_r0(r)
|
||||||
|
hdr = None
|
||||||
|
%>\
|
||||||
|
|
||||||
|
typedef struct packed {
|
||||||
|
% if r.is_homogeneous():
|
||||||
|
## If we have a homogeneous register or multireg, there is just one field
|
||||||
|
## (possibly replicated many times). The typedef is for one copy of that
|
||||||
|
## field.
|
||||||
|
<%
|
||||||
|
field = r.get_field_list()[0]
|
||||||
|
field_q_width = field.get_n_bits(r0.hwext, r0.hwqe, r0.hwre, ['q'])
|
||||||
|
field_q_bits = lib.bitarray(field_q_width, 2)
|
||||||
|
%>\
|
||||||
|
logic ${field_q_bits} q;
|
||||||
|
% if r0.hwqe:
|
||||||
|
logic qe;
|
||||||
|
% endif
|
||||||
|
% if r0.hwre or (r0.shadowed and r0.hwext):
|
||||||
|
logic re;
|
||||||
|
% endif
|
||||||
|
% if r0.shadowed and not r0.hwext:
|
||||||
|
logic err_update;
|
||||||
|
logic err_storage;
|
||||||
|
% endif
|
||||||
|
% else:
|
||||||
|
## We are inhomogeneous, which means there is more than one different
|
||||||
|
## field. Generate a reg2hw typedef that packs together all the fields of
|
||||||
|
## the register.
|
||||||
|
% for f in r0.fields:
|
||||||
|
<%
|
||||||
|
field_q_width = f.get_n_bits(r0.hwext, r0.hwqe, r0.hwre, ["q"])
|
||||||
|
%>\
|
||||||
|
% if field_q_width:
|
||||||
|
<%
|
||||||
|
field_q_bits = lib.bitarray(field_q_width, 2)
|
||||||
|
struct_name = f.name.lower()
|
||||||
|
%>\
|
||||||
|
struct packed {
|
||||||
|
logic ${field_q_bits} q;
|
||||||
|
% if r0.hwqe:
|
||||||
|
logic qe;
|
||||||
|
% endif
|
||||||
|
% if r0.hwre or (r0.shadowed and r0.hwext):
|
||||||
|
logic re;
|
||||||
|
% endif
|
||||||
|
% if r0.shadowed and not r0.hwext:
|
||||||
|
logic err_update;
|
||||||
|
logic err_storage;
|
||||||
|
% endif
|
||||||
|
} ${struct_name};
|
||||||
|
%endif
|
||||||
|
%endfor
|
||||||
|
%endif
|
||||||
|
} ${gen_rtl.get_reg_tx_type(block, r, False)};
|
||||||
|
%endif
|
||||||
|
% endfor
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
% if r.get_n_bits(["d"]):
|
||||||
|
% if hdr:
|
||||||
|
|
||||||
|
${hdr}
|
||||||
|
% endif
|
||||||
|
<%
|
||||||
|
r0 = gen_rtl.get_r0(r)
|
||||||
|
hdr = None
|
||||||
|
%>\
|
||||||
|
|
||||||
|
typedef struct packed {
|
||||||
|
% if r.is_homogeneous():
|
||||||
|
## If we have a homogeneous register or multireg, there is just one field
|
||||||
|
## (possibly replicated many times). The typedef is for one copy of that
|
||||||
|
## field.
|
||||||
|
<%
|
||||||
|
field = r.get_field_list()[0]
|
||||||
|
field_d_width = field.get_n_bits(r0.hwext, r0.hwqe, r0.hwre, ['d'])
|
||||||
|
field_d_bits = lib.bitarray(field_d_width, 2)
|
||||||
|
%>\
|
||||||
|
logic ${field_d_bits} d;
|
||||||
|
% if not r0.hwext:
|
||||||
|
logic de;
|
||||||
|
% endif
|
||||||
|
% else:
|
||||||
|
## We are inhomogeneous, which means there is more than one different
|
||||||
|
## field. Generate a hw2reg typedef that packs together all the fields of
|
||||||
|
## the register.
|
||||||
|
% for f in r0.fields:
|
||||||
|
<%
|
||||||
|
field_d_width = f.get_n_bits(r0.hwext, r0.hwqe, r0.hwre, ["d"])
|
||||||
|
%>\
|
||||||
|
% if field_d_width:
|
||||||
|
<%
|
||||||
|
field_d_bits = lib.bitarray(field_d_width, 2)
|
||||||
|
struct_name = f.name.lower()
|
||||||
|
%>\
|
||||||
|
struct packed {
|
||||||
|
logic ${field_d_bits} d;
|
||||||
|
% if not r0.hwext:
|
||||||
|
logic de;
|
||||||
|
% endif
|
||||||
|
} ${struct_name};
|
||||||
|
%endif
|
||||||
|
%endfor
|
||||||
|
%endif
|
||||||
|
} ${gen_rtl.get_reg_tx_type(block, r, True)};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% if block.expose_reg_if:
|
||||||
|
<%
|
||||||
|
lpfx = gen_rtl.get_type_name_pfx(block, iface_name)
|
||||||
|
addr_width = rb.get_addr_width()
|
||||||
|
data_width = block.regwidth
|
||||||
|
data_byte_width = data_width // 8
|
||||||
|
|
||||||
|
# This will produce strings like "[0:0] " to let us keep
|
||||||
|
# everything lined up whether there's 1 or 2 digits in the MSB.
|
||||||
|
aw_bits = f'[{addr_width-1}:0]'.ljust(6)
|
||||||
|
dw_bits = f'[{data_width-1}:0]'.ljust(6)
|
||||||
|
dbw_bits = f'[{data_byte_width-1}:0]'.ljust(6)
|
||||||
|
%>\
|
||||||
|
|
||||||
|
typedef struct packed {
|
||||||
|
logic reg_we;
|
||||||
|
logic reg_re;
|
||||||
|
logic ${aw_bits} reg_addr;
|
||||||
|
logic ${dw_bits} reg_wdata;
|
||||||
|
logic ${dbw_bits} reg_be;
|
||||||
|
} ${lpfx}_reg2hw_reg_if_t;
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="reg2hw_for_iface(iface_name, iface_desc, for_iface, rb)">\
|
||||||
|
<%
|
||||||
|
lpfx = gen_rtl.get_type_name_pfx(block, iface_name)
|
||||||
|
nbits = rb.get_n_bits(["q", "qe", "re"])
|
||||||
|
packbit = 0
|
||||||
|
|
||||||
|
addr_width = rb.get_addr_width()
|
||||||
|
data_width = block.regwidth
|
||||||
|
data_byte_width = data_width // 8
|
||||||
|
reg_if_width = 2 + addr_width + data_width + data_byte_width
|
||||||
|
%>\
|
||||||
|
% if nbits > 0:
|
||||||
|
|
||||||
|
// Register -> HW type${for_iface}
|
||||||
|
typedef struct packed {
|
||||||
|
% if block.expose_reg_if:
|
||||||
|
${lpfx}_reg2hw_reg_if_t reg_if; // [${reg_if_width + nbits - 1}:${nbits}]
|
||||||
|
% endif
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
% if r.get_n_bits(["q"]):
|
||||||
|
<%
|
||||||
|
r0 = gen_rtl.get_r0(r)
|
||||||
|
struct_type = gen_rtl.get_reg_tx_type(block, r, False)
|
||||||
|
struct_width = r0.get_n_bits(['q', 'qe', 're'])
|
||||||
|
|
||||||
|
if isinstance(r, MultiRegister):
|
||||||
|
struct_type += " [{}:0]".format(r.count - 1)
|
||||||
|
struct_width *= r.count
|
||||||
|
|
||||||
|
msb = nbits - packbit - 1
|
||||||
|
lsb = msb - struct_width + 1
|
||||||
|
packbit += struct_width
|
||||||
|
name_and_comment = f'{r0.name.lower()}; // [{msb}:{lsb}]'
|
||||||
|
%>\
|
||||||
|
% if 4 + len(struct_type) + 1 + len(name_and_comment) <= 100:
|
||||||
|
${struct_type} ${name_and_comment}
|
||||||
|
% else:
|
||||||
|
${struct_type}
|
||||||
|
${name_and_comment}
|
||||||
|
% endif
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
} ${gen_rtl.get_iface_tx_type(block, iface_name, False)};
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="hw2reg_for_iface(iface_name, iface_desc, for_iface, rb)">\
|
||||||
|
<%
|
||||||
|
nbits = rb.get_n_bits(["d", "de"])
|
||||||
|
packbit = 0
|
||||||
|
%>\
|
||||||
|
% if nbits > 0:
|
||||||
|
|
||||||
|
// HW -> register type${for_iface}
|
||||||
|
typedef struct packed {
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
% if r.get_n_bits(["d"]):
|
||||||
|
<%
|
||||||
|
r0 = gen_rtl.get_r0(r)
|
||||||
|
struct_type = gen_rtl.get_reg_tx_type(block, r, True)
|
||||||
|
struct_width = r0.get_n_bits(['d', 'de'])
|
||||||
|
|
||||||
|
if isinstance(r, MultiRegister):
|
||||||
|
struct_type += " [{}:0]".format(r.count - 1)
|
||||||
|
struct_width *= r.count
|
||||||
|
|
||||||
|
msb = nbits - packbit - 1
|
||||||
|
lsb = msb - struct_width + 1
|
||||||
|
packbit += struct_width
|
||||||
|
name_and_comment = f'{r0.name.lower()}; // [{msb}:{lsb}]'
|
||||||
|
%>\
|
||||||
|
% if 4 + len(struct_type) + 1 + len(name_and_comment) <= 100:
|
||||||
|
${struct_type} ${name_and_comment}
|
||||||
|
% else:
|
||||||
|
${struct_type}
|
||||||
|
${name_and_comment}
|
||||||
|
% endif
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
} ${gen_rtl.get_iface_tx_type(block, iface_name, True)};
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="offsets_for_iface(iface_name, iface_desc, for_iface, rb)">\
|
||||||
|
% if not rb.flat_regs:
|
||||||
|
<% return STOP_RENDERING %>
|
||||||
|
% endif
|
||||||
|
|
||||||
|
// Register offsets${for_iface}
|
||||||
|
<%
|
||||||
|
aw_name, aw = addr_widths[iface_name]
|
||||||
|
%>\
|
||||||
|
% for r in rb.flat_regs:
|
||||||
|
<%
|
||||||
|
value = "{}'h{:x}".format(aw, r.offset)
|
||||||
|
%>\
|
||||||
|
parameter logic [${aw_name}-1:0] ${reg_pfx(r)}_OFFSET = ${value};
|
||||||
|
% endfor
|
||||||
|
</%def>\
|
||||||
|
<%def name="hwext_resvals_for_iface(iface_name, iface_desc, for_iface, rb)">\
|
||||||
|
<%
|
||||||
|
hwext_regs = [r for r in rb.flat_regs if r.hwext]
|
||||||
|
%>\
|
||||||
|
% if hwext_regs:
|
||||||
|
|
||||||
|
// Reset values for hwext registers and their fields${for_iface}
|
||||||
|
% for reg in hwext_regs:
|
||||||
|
<%
|
||||||
|
reg_width = reg.get_width()
|
||||||
|
reg_msb = reg_width - 1
|
||||||
|
reg_resval = "{}'h{:x}".format(reg_width, reg.resval)
|
||||||
|
%>\
|
||||||
|
parameter logic [${reg_msb}:0] ${reg_resname(reg)} = ${reg_resval};
|
||||||
|
% for field in reg.fields:
|
||||||
|
% if field.resval is not None:
|
||||||
|
<%
|
||||||
|
field_width = field.bits.width()
|
||||||
|
field_msb = field_width - 1
|
||||||
|
field_resval = "{}'h{:x}".format(field_width, field.resval)
|
||||||
|
%>\
|
||||||
|
parameter logic [${field_msb}:0] ${field_resname(reg, field)} = ${field_resval};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="windows_for_iface(iface_name, iface_desc, for_iface, rb)">\
|
||||||
|
% if rb.windows:
|
||||||
|
<%
|
||||||
|
aw_name, aw = addr_widths[iface_name]
|
||||||
|
%>\
|
||||||
|
|
||||||
|
// Window parameters${for_iface}
|
||||||
|
% for i,w in enumerate(rb.windows):
|
||||||
|
<%
|
||||||
|
win_pfx = '{}_{}'.format(ublock, w.name.upper())
|
||||||
|
base_txt_val = "{}'h {:x}".format(aw, w.offset)
|
||||||
|
size_txt_val = "'h {:x}".format(w.size_in_bytes)
|
||||||
|
|
||||||
|
offset_type = 'logic [{}-1:0]'.format(aw_name)
|
||||||
|
size_type = 'int unsigned'
|
||||||
|
max_type_len = max(len(offset_type), len(size_type))
|
||||||
|
|
||||||
|
offset_type += ' ' * (max_type_len - len(offset_type))
|
||||||
|
size_type += ' ' * (max_type_len - len(size_type))
|
||||||
|
|
||||||
|
%>\
|
||||||
|
parameter ${offset_type} ${win_pfx}_OFFSET = ${base_txt_val};
|
||||||
|
parameter ${size_type} ${win_pfx}_SIZE = ${size_txt_val};
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="reg_data_for_iface(iface_name, iface_desc, for_iface, rb)">\
|
||||||
|
% if rb.flat_regs:
|
||||||
|
<%
|
||||||
|
lpfx = gen_rtl.get_type_name_pfx(block, iface_name)
|
||||||
|
upfx = lpfx.upper()
|
||||||
|
idx_len = len("{}".format(len(rb.flat_regs) - 1))
|
||||||
|
%>\
|
||||||
|
|
||||||
|
// Register index${for_iface}
|
||||||
|
typedef enum int {
|
||||||
|
% for r in rb.flat_regs:
|
||||||
|
${ublock}_${r.name.upper()}${"" if loop.last else ","}
|
||||||
|
% endfor
|
||||||
|
} ${lpfx}_id_e;
|
||||||
|
|
||||||
|
// Register width information to check illegal writes${for_iface}
|
||||||
|
parameter logic [3:0] ${upfx}_PERMIT [${len(rb.flat_regs)}] = '{
|
||||||
|
% for i, r in enumerate(rb.flat_regs):
|
||||||
|
<%
|
||||||
|
index_str = "{}".format(i).rjust(idx_len)
|
||||||
|
width = r.get_width()
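# Each bit of the 4-bit PERMIT mask marks a byte lane of the register
# that holds real data; reg_top uses it to reject sub-word writes to
# unused lanes.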
|
||||||
|
if width > 24:
|
||||||
|
mask = '1111'
|
||||||
|
elif width > 16:
|
||||||
|
mask = '0111'
|
||||||
|
elif width > 8:
|
||||||
|
mask = '0011'
|
||||||
|
else:
|
||||||
|
mask = '0001'
|
||||||
|
|
||||||
|
comma = ',' if i < len(rb.flat_regs) - 1 else ' '
|
||||||
|
%>\
|
||||||
|
4'b${mask}${comma} // index[${index_str}] ${ublock}_${r.name.upper()}
|
||||||
|
% endfor
|
||||||
|
};
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
|
||||||
|
package ${lblock}_reg_pkg;
|
||||||
|
% if localparams:
|
||||||
|
|
||||||
|
// Param list
|
||||||
|
% for param in localparams:
|
||||||
|
parameter ${param.param_type} ${param.name} = ${param.value};
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
|
||||||
|
// Address widths within the block
|
||||||
|
% for param_name, width in addr_widths.values():
|
||||||
|
parameter int ${param_name} = ${width};
|
||||||
|
% endfor
|
||||||
|
<%
|
||||||
|
just_default = len(block.reg_blocks) == 1 and None in block.reg_blocks
|
||||||
|
%>\
|
||||||
|
% for iface_name, rb in block.reg_blocks.items():
|
||||||
|
<%
|
||||||
|
iface_desc = iface_name or 'default'
|
||||||
|
for_iface = '' if just_default else ' for {} interface'.format(iface_desc)
|
||||||
|
%>\
|
||||||
|
${typedefs_for_iface(iface_name, iface_desc, for_iface, rb)}\
|
||||||
|
${reg2hw_for_iface(iface_name, iface_desc, for_iface, rb)}\
|
||||||
|
${hw2reg_for_iface(iface_name, iface_desc, for_iface, rb)}\
|
||||||
|
${offsets_for_iface(iface_name, iface_desc, for_iface, rb)}\
|
||||||
|
${hwext_resvals_for_iface(iface_name, iface_desc, for_iface, rb)}\
|
||||||
|
${windows_for_iface(iface_name, iface_desc, for_iface, rb)}\
|
||||||
|
${reg_data_for_iface(iface_name, iface_desc, for_iface, rb)}\
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
endpackage
|
||||||
|
|
|
@ -0,0 +1,408 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
//
|
||||||
|
// Register Top module auto-generated by `reggen`
|
||||||
|
<%
|
||||||
|
from reggen import gen_rtl
|
||||||
|
from reggen.access import HwAccess, SwRdAccess, SwWrAccess
|
||||||
|
from reggen.lib import get_basename
|
||||||
|
from reggen.register import Register
|
||||||
|
from reggen.multi_register import MultiRegister
|
||||||
|
|
||||||
|
num_wins = len(rb.windows)
|
||||||
|
num_wins_width = ((num_wins+1).bit_length()) - 1
|
||||||
|
num_reg_dsp = 1 if rb.all_regs else 0
|
||||||
|
num_dsp = num_wins + num_reg_dsp
|
||||||
|
regs_flat = rb.flat_regs
|
||||||
|
max_regs_char = len("{}".format(len(regs_flat) - 1))
|
||||||
|
addr_width = rb.get_addr_width()
|
||||||
|
|
||||||
|
lblock = block.name.lower()
|
||||||
|
ublock = lblock.upper()
|
||||||
|
|
||||||
|
u_mod_base = mod_base.upper()
|
||||||
|
|
||||||
|
reg2hw_t = gen_rtl.get_iface_tx_type(block, if_name, False)
|
||||||
|
hw2reg_t = gen_rtl.get_iface_tx_type(block, if_name, True)
|
||||||
|
|
||||||
|
win_array_decl = f' [{num_wins}]' if num_wins > 1 else ''
|
||||||
|
|
||||||
|
# Calculate whether we're going to need an AW parameter. We use it if there
|
||||||
|
# are any registers (obviously). We also use it if there are any windows that
|
||||||
|
# don't start at zero and end at 1 << addr_width (see the "addr_checks"
|
||||||
|
# calculation below for where that comes from).
|
||||||
|
needs_aw = (bool(regs_flat) or
|
||||||
|
num_wins > 1 or
|
||||||
|
rb.windows and (
|
||||||
|
rb.windows[0].offset != 0 or
|
||||||
|
rb.windows[0].size_in_bytes != (1 << addr_width)))
|
||||||
|
|
||||||
|
|
||||||
|
common_data_intg_gen = 0 if rb.has_data_intg_passthru else 1
|
||||||
|
adapt_data_intg_gen = 1 if rb.has_data_intg_passthru else 0
|
||||||
|
assert common_data_intg_gen != adapt_data_intg_gen
|
||||||
|
%>
|
||||||
|
|
||||||
|
module ${mod_name} (
|
||||||
|
input logic clk_i,
|
||||||
|
input logic rst_ni,
|
||||||
|
|
||||||
|
// To HW
|
||||||
|
% if rb.get_n_bits(["q","qe","re"]):
|
||||||
|
output ${lblock}_reg_pkg::${reg2hw_t} reg2hw, // Write
|
||||||
|
% endif
|
||||||
|
% if rb.get_n_bits(["d","de"]):
|
||||||
|
input ${lblock}_reg_pkg::${hw2reg_t} hw2reg, // Read
|
||||||
|
% endif
|
||||||
|
|
||||||
|
input logic reg_we,
|
||||||
|
input logic reg_re,
|
||||||
|
input logic [31:0] reg_wdata,
|
||||||
|
input logic [ 3:0] reg_be,
|
||||||
|
input logic [31:0] reg_addr,
|
||||||
|
output logic [31:0] reg_rdata
|
||||||
|
);
|
||||||
|
|
||||||
|
import ${lblock}_reg_pkg::* ;
|
||||||
|
|
||||||
|
% if needs_aw:
|
||||||
|
localparam int AW = ${addr_width};
|
||||||
|
% endif
|
||||||
|
% if rb.all_regs:
|
||||||
|
localparam int DW = ${block.regwidth};
|
||||||
|
localparam int DBW = DW/8; // Byte Width
|
||||||
|
|
||||||
|
logic reg_error;
|
||||||
|
logic addrmiss, wr_err;
|
||||||
|
|
||||||
|
logic [DW-1:0] reg_rdata_next;
|
||||||
|
% endif
|
||||||
|
|
||||||
|
% if rb.all_regs:
|
||||||
|
assign reg_rdata = reg_rdata_next;
|
||||||
|
assign reg_error = wr_err;
|
||||||
|
|
||||||
|
// Define SW related signals
|
||||||
|
// Format: <reg>_<field>_{wd|we|qs}
|
||||||
|
// or <reg>_{wd|we|qs} if field == 1 or 0
|
||||||
|
% for r in regs_flat:
|
||||||
|
${reg_sig_decl(r)}\
|
||||||
|
% for f in r.fields:
|
||||||
|
<%
|
||||||
|
fld_suff = '_' + f.name.lower() if len(r.fields) > 1 else ''
|
||||||
|
sig_name = r.name.lower() + fld_suff
|
||||||
|
%>\
|
||||||
|
${field_sig_decl(f, sig_name, r.hwext, r.shadowed)}\
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// Register instances
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
######################## multiregister ###########################
|
||||||
|
% if isinstance(r, MultiRegister):
|
||||||
|
<%
|
||||||
|
k = 0
|
||||||
|
%>
|
||||||
|
% for sr in r.regs:
|
||||||
|
// Subregister ${k} of Multireg ${r.reg.name.lower()}
|
||||||
|
// R[${sr.name.lower()}]: V(${str(sr.hwext)})
|
||||||
|
% if len(sr.fields) == 1:
|
||||||
|
<%
|
||||||
|
f = sr.fields[0]
|
||||||
|
finst_name = sr.name.lower()
|
||||||
|
fsig_name = r.reg.name.lower() + "[%d]" % k
|
||||||
|
k = k + 1
|
||||||
|
%>
|
||||||
|
${finst_gen(sr, f, finst_name, fsig_name)}
|
||||||
|
% else:
|
||||||
|
% for f in sr.fields:
|
||||||
|
<%
|
||||||
|
finst_name = sr.name.lower() + "_" + f.name.lower()
|
||||||
|
if r.is_homogeneous():
|
||||||
|
fsig_name = r.reg.name.lower() + "[%d]" % k
|
||||||
|
k = k + 1
|
||||||
|
else:
|
||||||
|
fsig_name = r.reg.name.lower() + "[%d]" % k + "." + get_basename(f.name.lower())
|
||||||
|
%>
|
||||||
|
// F[${f.name.lower()}]: ${f.bits.msb}:${f.bits.lsb}
|
||||||
|
${finst_gen(sr, f, finst_name, fsig_name)}
|
||||||
|
% endfor
|
||||||
|
<%
|
||||||
|
if not r.is_homogeneous():
|
||||||
|
k += 1
|
||||||
|
%>
|
||||||
|
% endif
|
||||||
|
## for: mreg_flat
|
||||||
|
% endfor
|
||||||
|
######################## register with single field ###########################
|
||||||
|
% elif len(r.fields) == 1:
|
||||||
|
// R[${r.name.lower()}]: V(${str(r.hwext)})
|
||||||
|
<%
|
||||||
|
f = r.fields[0]
|
||||||
|
finst_name = r.name.lower()
|
||||||
|
fsig_name = r.name.lower()
|
||||||
|
%>
|
||||||
|
${finst_gen(r, f, finst_name, fsig_name)}
|
||||||
|
######################## register with multiple fields ###########################
|
||||||
|
% else:
|
||||||
|
// R[${r.name.lower()}]: V(${str(r.hwext)})
|
||||||
|
% for f in r.fields:
|
||||||
|
<%
|
||||||
|
finst_name = r.name.lower() + "_" + f.name.lower()
|
||||||
|
fsig_name = r.name.lower() + "." + f.name.lower()
|
||||||
|
%>
|
||||||
|
// F[${f.name.lower()}]: ${f.bits.msb}:${f.bits.lsb}
|
||||||
|
${finst_gen(r, f, finst_name, fsig_name)}
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
|
||||||
|
## for: rb.all_regs
|
||||||
|
% endfor
|
||||||
|
logic [${len(regs_flat)-1}:0] addr_hit;
|
||||||
|
always_comb begin
|
||||||
|
addr_hit = '0;
|
||||||
|
% for i,r in enumerate(regs_flat):
|
||||||
|
addr_hit[${"{}".format(i).rjust(max_regs_char)}] = (reg_addr == ${ublock}_${r.name.upper()}_OFFSET);
|
||||||
|
% endfor
|
||||||
|
end
|
||||||
|
|
||||||
|
assign addrmiss = (reg_re || reg_we) ? ~|addr_hit : 1'b0 ;
|
||||||
|
|
||||||
|
% if regs_flat:
|
||||||
|
<%
|
||||||
|
# We want to signal wr_err if reg_be (the byte enable signal) is true for
|
||||||
|
# any bytes that aren't supported by a register. That's true if
# addr_hit[i] is set and a bit is set in reg_be but not in *_PERMIT[i].
|
||||||
|
|
||||||
|
wr_err_terms = ['(addr_hit[{idx}] & (|({mod}_PERMIT[{idx}] & ~reg_be)))'
|
||||||
|
.format(idx=str(i).rjust(max_regs_char),
|
||||||
|
mod=u_mod_base)
|
||||||
|
for i in range(len(regs_flat))]
|
||||||
|
wr_err_expr = (' |\n' + (' ' * 15)).join(wr_err_terms)
|
||||||
|
%>\
|
||||||
|
// Check sub-word write is permitted
|
||||||
|
always_comb begin
|
||||||
|
wr_err = (reg_we &
|
||||||
|
(${wr_err_expr}));
|
||||||
|
end
|
||||||
|
% else:
|
||||||
|
    assign wr_err = 1'b0;
|
||||||
|
% endif\
|
||||||
|
|
||||||
|
% for i, r in enumerate(regs_flat):
|
||||||
|
${reg_enable_gen(r, i)}\
|
||||||
|
% if len(r.fields) == 1:
|
||||||
|
${field_wd_gen(r.fields[0], r.name.lower(), r.hwext, r.shadowed, i)}\
|
||||||
|
% else:
|
||||||
|
% for f in r.fields:
|
||||||
|
${field_wd_gen(f, r.name.lower() + "_" + f.name.lower(), r.hwext, r.shadowed, i)}\
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// Read data return
|
||||||
|
always_comb begin
|
||||||
|
reg_rdata_next = '0;
|
||||||
|
unique case (1'b1)
|
||||||
|
% for i, r in enumerate(regs_flat):
|
||||||
|
% if len(r.fields) == 1:
|
||||||
|
addr_hit[${i}]: begin
|
||||||
|
${rdata_gen(r.fields[0], r.name.lower())}\
|
||||||
|
end
|
||||||
|
|
||||||
|
% else:
|
||||||
|
addr_hit[${i}]: begin
|
||||||
|
% for f in r.fields:
|
||||||
|
${rdata_gen(f, r.name.lower() + "_" + f.name.lower())}\
|
||||||
|
% endfor
|
||||||
|
end
|
||||||
|
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
default: begin
|
||||||
|
reg_rdata_next = '1;
|
||||||
|
end
|
||||||
|
endcase
|
||||||
|
end
|
||||||
|
% endif
|
||||||
|
|
||||||
|
// Unused signal tieoff
|
||||||
|
% if rb.all_regs:
|
||||||
|
|
||||||
|
// wdata / byte enable are not always fully used
|
||||||
|
// add a blanket unused statement to handle lint waivers
|
||||||
|
logic unused_wdata;
|
||||||
|
logic unused_be;
|
||||||
|
assign unused_wdata = ^reg_wdata;
|
||||||
|
assign unused_be = ^reg_be;
|
||||||
|
% else:
|
||||||
|
// devmode_i is not used if there are no registers
|
||||||
|
logic unused_devmode;
|
||||||
|
assign unused_devmode = ^devmode_i;
|
||||||
|
% endif
|
||||||
|
% if rb.all_regs:
|
||||||
|
|
||||||
|
% endif
|
||||||
|
endmodule
|
||||||
|
<%def name="str_bits_sv(bits)">\
|
||||||
|
% if bits.msb != bits.lsb:
|
||||||
|
${bits.msb}:${bits.lsb}\
|
||||||
|
% else:
|
||||||
|
${bits.msb}\
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="str_arr_sv(bits)">\
|
||||||
|
% if bits.msb != bits.lsb:
|
||||||
|
[${bits.msb-bits.lsb}:0] \
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="reg_sig_decl(reg)">\
|
||||||
|
% if reg.needs_re():
|
||||||
|
logic ${reg.name.lower()}_re;
|
||||||
|
% endif
|
||||||
|
% if reg.needs_we():
|
||||||
|
logic ${reg.name.lower()}_we;
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="field_sig_decl(field, sig_name, hwext, shadowed)">\
|
||||||
|
% if field.swaccess.allows_read():
|
||||||
|
logic ${str_arr_sv(field.bits)}${sig_name}_qs;
|
||||||
|
% endif
|
||||||
|
% if field.swaccess.allows_write():
|
||||||
|
logic ${str_arr_sv(field.bits)}${sig_name}_wd;
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="finst_gen(reg, field, finst_name, fsig_name)">\
|
||||||
|
<%
|
||||||
|
re_expr = f'{reg.name.lower()}_re' if field.swaccess.allows_read() else "1'b0"
|
||||||
|
|
||||||
|
if field.swaccess.allows_write():
|
||||||
|
# We usually use the REG_we signal, but use REG_re for RC fields
|
||||||
|
# (which get updated on a read, not a write)
|
||||||
|
we_suffix = 're' if field.swaccess.swrd() == SwRdAccess.RC else 'we'
|
||||||
|
we_signal = f'{reg.name.lower()}_{we_suffix}'
|
||||||
|
|
||||||
|
if reg.regwen:
|
||||||
|
we_expr = f'{we_signal} & {reg.regwen.lower()}_qs'
|
||||||
|
else:
|
||||||
|
we_expr = we_signal
|
||||||
|
wd_expr = f'{finst_name}_wd'
|
||||||
|
else:
|
||||||
|
we_expr = "1'b0"
|
||||||
|
wd_expr = "'0"
|
||||||
|
|
||||||
|
if field.hwaccess.allows_write():
|
||||||
|
de_expr = f'hw2reg.{fsig_name}.de'
|
||||||
|
d_expr = f'hw2reg.{fsig_name}.d'
|
||||||
|
else:
|
||||||
|
de_expr = "1'b0"
|
||||||
|
d_expr = "'0"
|
||||||
|
|
||||||
|
qre_expr = f'reg2hw.{fsig_name}.re' if reg.hwre or reg.shadowed else ""
|
||||||
|
|
||||||
|
if field.hwaccess.allows_read():
|
||||||
|
qe_expr = f'reg2hw.{fsig_name}.qe' if reg.hwqe else ''
|
||||||
|
q_expr = f'reg2hw.{fsig_name}.q'
|
||||||
|
else:
|
||||||
|
qe_expr = ''
|
||||||
|
q_expr = ''
|
||||||
|
|
||||||
|
qs_expr = f'{finst_name}_qs' if field.swaccess.allows_read() else ''
|
||||||
|
%>\
|
||||||
|
% if reg.hwext: ## if hwext, instantiate prim_subreg_ext
|
||||||
|
prim_subreg_ext #(
|
||||||
|
.DW (${field.bits.width()})
|
||||||
|
) u_${finst_name} (
|
||||||
|
.re (${re_expr}),
|
||||||
|
.we (${we_expr}),
|
||||||
|
.wd (${wd_expr}),
|
||||||
|
.d (${d_expr}),
|
||||||
|
.qre (${qre_expr}),
|
||||||
|
.qe (${qe_expr}),
|
||||||
|
.q (${q_expr}),
|
||||||
|
.qs (${qs_expr})
|
||||||
|
);
|
||||||
|
% else:
|
||||||
|
<%
|
||||||
|
# This isn't a field in a hwext register. Instantiate prim_subreg,
|
||||||
|
# prim_subreg_shadow or constant assign.
|
||||||
|
|
||||||
|
resval_expr = f"{field.bits.width()}'h{field.resval or 0:x}"
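# A field is treated as a constant if hardware neither reads nor
# writes it, software cannot write it and the software read behaviour
# is a plain read; such fields reduce to the simple assign below.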
|
||||||
|
is_const_reg = not (field.hwaccess.allows_read() or
|
||||||
|
field.hwaccess.allows_write() or
|
||||||
|
field.swaccess.allows_write() or
|
||||||
|
field.swaccess.swrd() != SwRdAccess.RD)
|
||||||
|
|
||||||
|
subreg_block = 'prim_subreg' + ('_shadowed' if reg.shadowed else '')
|
||||||
|
%>\
|
||||||
|
% if is_const_reg:
|
||||||
|
// constant-only read
|
||||||
|
assign ${finst_name}_qs = ${resval_expr};
|
||||||
|
% else:
|
||||||
|
${subreg_block} #(
|
||||||
|
.DW (${field.bits.width()}),
|
||||||
|
.SWACCESS("${field.swaccess.value[1].name.upper()}"),
|
||||||
|
.RESVAL (${resval_expr})
|
||||||
|
) u_${finst_name} (
|
||||||
|
.clk_i (clk_i),
|
||||||
|
.rst_ni (rst_ni),
|
||||||
|
|
||||||
|
// from register interface
|
||||||
|
% if reg.shadowed:
|
||||||
|
.re (${re_expr}),
|
||||||
|
% endif
|
||||||
|
.we (${we_expr}),
|
||||||
|
.wd (${wd_expr}),
|
||||||
|
|
||||||
|
// from internal hardware
|
||||||
|
.de (${de_expr}),
|
||||||
|
.d (${d_expr}),
|
||||||
|
|
||||||
|
// to internal hardware
|
||||||
|
.qe (${qe_expr}),
|
||||||
|
.q (${q_expr}),
|
||||||
|
|
||||||
|
// to register interface (read)
|
||||||
|
% if not reg.shadowed:
|
||||||
|
.qs (${qs_expr})
|
||||||
|
% else:
|
||||||
|
.qs (${qs_expr}),
|
||||||
|
|
||||||
|
// Shadow register error conditions
|
||||||
|
.err_update (reg2hw.${fsig_name}.err_update),
|
||||||
|
.err_storage (reg2hw.${fsig_name}.err_storage)
|
||||||
|
% endif
|
||||||
|
);
|
||||||
|
% endif ## end non-constant prim_subreg
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="reg_enable_gen(reg, idx)">\
|
||||||
|
% if reg.needs_re():
|
||||||
|
assign ${reg.name.lower()}_re = addr_hit[${idx}] & reg_re & !reg_error;
|
||||||
|
% endif
|
||||||
|
% if reg.needs_we():
|
||||||
|
assign ${reg.name.lower()}_we = addr_hit[${idx}] & reg_we & !reg_error;
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="field_wd_gen(field, sig_name, hwext, shadowed, idx)">\
|
||||||
|
<%
|
||||||
|
needs_wd = field.swaccess.allows_write()
|
||||||
|
space = '\n' if needs_wd else ''
|
||||||
|
%>\
|
||||||
|
${space}\
|
||||||
|
% if needs_wd:
|
||||||
|
% if field.swaccess.swrd() == SwRdAccess.RC:
|
||||||
|
assign ${sig_name}_wd = '1;
|
||||||
|
% else:
|
||||||
|
assign ${sig_name}_wd = reg_wdata[${str_bits_sv(field.bits)}];
|
||||||
|
% endif
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
<%def name="rdata_gen(field, sig_name)">\
|
||||||
|
% if field.swaccess.allows_read():
|
||||||
|
reg_rdata_next[${str_bits_sv(field.bits)}] = ${sig_name}_qs;
|
||||||
|
% else:
|
||||||
|
reg_rdata_next[${str_bits_sv(field.bits)}] = '0;
|
||||||
|
% endif
|
||||||
|
</%def>\
|
|
@ -0,0 +1,452 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
|
from .access import SWAccess, HWAccess
|
||||||
|
from .field import Field
|
||||||
|
from .lib import (check_keys, check_str, check_name, check_bool,
|
||||||
|
check_list, check_str_list, check_int)
|
||||||
|
from .params import ReggenParams
|
||||||
|
from .reg_base import RegBase
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
REQUIRED_FIELDS = {
|
||||||
|
'name': ['s', "name of the register"],
|
||||||
|
'desc': ['t', "description of the register"],
|
||||||
|
'fields': ['l', "list of register field description groups"]
|
||||||
|
}
|
||||||
|
|
||||||
|
OPTIONAL_FIELDS = {
|
||||||
|
'swaccess': [
|
||||||
|
's',
|
||||||
|
"software access permission to use for "
|
||||||
|
"fields that don't specify swaccess"
|
||||||
|
],
|
||||||
|
'hwaccess': [
|
||||||
|
's',
|
||||||
|
"hardware access permission to use for "
|
||||||
|
"fields that don't specify hwaccess"
|
||||||
|
],
|
||||||
|
'hwext': [
|
||||||
|
's',
|
||||||
|
"'true' if the register is stored outside "
|
||||||
|
"of the register module"
|
||||||
|
],
|
||||||
|
'hwqe': [
|
||||||
|
's',
"'true' if hardware uses the 'q' enable signal, "
"which is a latched signal of the software write pulse."
|
||||||
|
],
|
||||||
|
'hwre': [
|
||||||
|
's',
"'true' if hardware uses the 're' signal, "
"which is a latched signal of the software read pulse."
|
||||||
|
],
|
||||||
|
'regwen': [
|
||||||
|
's',
"if the register is write-protected by another register, that "
"register's name should be given here. An empty string means no "
"write protection"
|
||||||
|
],
|
||||||
|
'resval': [
|
||||||
|
'd',
|
||||||
|
"reset value of full register (default 0)"
|
||||||
|
],
|
||||||
|
'tags': [
|
||||||
|
's',
|
||||||
|
"tags for the register, following the format 'tag_name:item1:item2...'"
|
||||||
|
],
|
||||||
|
'shadowed': [
|
||||||
|
's',
|
||||||
|
"'true' if the register is shadowed"
|
||||||
|
],
|
||||||
|
'update_err_alert': [
|
||||||
|
's',
|
||||||
|
"alert that will be triggered if "
|
||||||
|
"this shadowed register has update error"
|
||||||
|
],
|
||||||
|
'storage_err_alert': [
|
||||||
|
's',
|
||||||
|
"alert that will be triggered if "
|
||||||
|
"this shadowed register has storage error"
|
||||||
|
]
|
||||||
|
}
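# For illustration only (not taken from a real IP), a register entry using
# these keys might look like:
#
#   { name: "CTRL",
#     desc: "Control register",
#     swaccess: "rw",
#     fields: [ { bits: "0", name: "EN", desc: "Enable the block" } ]
#   }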
|
||||||
|
|
||||||
|
|
||||||
|
class Register(RegBase):
|
||||||
|
'''Code representing a register for reggen'''
|
||||||
|
def __init__(self,
|
||||||
|
offset: int,
|
||||||
|
name: str,
|
||||||
|
desc: str,
|
||||||
|
hwext: bool,
|
||||||
|
hwqe: bool,
|
||||||
|
hwre: bool,
|
||||||
|
regwen: Optional[str],
|
||||||
|
tags: List[str],
|
||||||
|
resval: Optional[int],
|
||||||
|
shadowed: bool,
|
||||||
|
fields: List[Field],
|
||||||
|
update_err_alert: Optional[str],
|
||||||
|
storage_err_alert: Optional[str]):
|
||||||
|
super().__init__(offset)
|
||||||
|
self.name = name
|
||||||
|
self.desc = desc
|
||||||
|
self.hwext = hwext
|
||||||
|
self.hwqe = hwqe
|
||||||
|
self.hwre = hwre
|
||||||
|
if self.hwre and not self.hwext:
|
||||||
|
raise ValueError('The {} register specifies hwre but not hwext.'
|
||||||
|
.format(self.name))
|
||||||
|
|
||||||
|
self.regwen = regwen
|
||||||
|
self.tags = tags
|
||||||
|
|
||||||
|
self.shadowed = shadowed
|
||||||
|
pattern = r'^[a-z0-9_]+_shadowed(?:_[0-9]+)?'
|
||||||
|
sounds_shadowy = re.match(pattern, self.name.lower())
|
||||||
|
if self.shadowed and not sounds_shadowy:
|
||||||
|
raise ValueError("Register {} has the shadowed flag but its name "
|
||||||
|
"doesn't end with the _shadowed suffix."
|
||||||
|
.format(self.name))
|
||||||
|
elif sounds_shadowy and not self.shadowed:
|
||||||
|
raise ValueError("Register {} has a name ending in _shadowed, but "
|
||||||
|
"the shadowed flag is not set."
|
||||||
|
.format(self.name))
|
||||||
|
|
||||||
|
# Take a copy of fields and then sort by bit index
|
||||||
|
assert fields
|
||||||
|
self.fields = fields.copy()
|
||||||
|
self.fields.sort(key=lambda field: field.bits.lsb)
|
||||||
|
|
||||||
|
# Index fields by name and check for duplicates
|
||||||
|
self.name_to_field = {} # type: Dict[str, Field]
|
||||||
|
for field in self.fields:
|
||||||
|
if field.name in self.name_to_field:
|
||||||
|
raise ValueError('Register {} has duplicate fields called {}.'
|
||||||
|
.format(self.name, field.name))
|
||||||
|
self.name_to_field[field.name] = field
|
||||||
|
|
||||||
|
# Check that fields have compatible access types if we are hwext
|
||||||
|
if self.hwext:
|
||||||
|
for field in self.fields:
|
||||||
|
if field.hwaccess.key == 'hro' and field.sw_readable():
|
||||||
|
raise ValueError('The {} register has hwext set, but '
|
||||||
|
'field {} has hro hwaccess and the '
|
||||||
|
'field value is readable by software '
|
||||||
|
'(mode {}).'
|
||||||
|
.format(self.name,
|
||||||
|
field.name,
|
||||||
|
field.swaccess.key))
|
||||||
|
if not self.hwqe and field.sw_writable():
|
||||||
|
raise ValueError('The {} register has hwext set and field '
|
||||||
|
'{} is writable by software (mode {}), '
|
||||||
|
'so the register must also enable hwqe.'
|
||||||
|
.format(self.name,
|
||||||
|
field.name,
|
||||||
|
field.swaccess.key))
|
||||||
|
|
||||||
|
# Check that field bits are disjoint
|
||||||
|
bits_used = 0
|
||||||
|
for field in self.fields:
|
||||||
|
field_mask = field.bits.bitmask()
|
||||||
|
if bits_used & field_mask:
|
||||||
|
raise ValueError('Register {} has non-disjoint fields: '
|
||||||
|
'{} uses bits {:#x} used by other fields.'
|
||||||
|
.format(self.name, field.name,
|
||||||
|
bits_used & field_mask))
|
||||||
|
|
||||||
|
# Compute a reset value and mask from our constituent fields.
|
||||||
|
self.resval = 0
|
||||||
|
self.resmask = 0
|
||||||
|
for field in self.fields:
|
||||||
|
self.resval |= (field.resval or 0) << field.bits.lsb
|
||||||
|
self.resmask |= field.bits.bitmask()
|
||||||
|
|
||||||
|
# If the register defined a reset value, make sure it matches. We've
|
||||||
|
# already checked that each field matches, but we still need to make
|
||||||
|
# sure there weren't any bits unaccounted for.
|
||||||
|
if resval is not None and self.resval != resval:
|
||||||
|
raise ValueError('Register {} specifies a reset value of {:#x} but '
|
||||||
|
'collecting reset values across its fields yields '
|
||||||
|
'{:#x}.'
|
||||||
|
.format(self.name, resval, self.resval))
|
||||||
|
|
||||||
|
self.update_err_alert = update_err_alert
|
||||||
|
self.storage_err_alert = storage_err_alert
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_raw(reg_width: int,
|
||||||
|
offset: int,
|
||||||
|
params: ReggenParams,
|
||||||
|
raw: object) -> 'Register':
|
||||||
|
rd = check_keys(raw, 'register',
|
||||||
|
list(REQUIRED_FIELDS.keys()),
|
||||||
|
list(OPTIONAL_FIELDS.keys()))
|
||||||
|
|
||||||
|
name = check_name(rd['name'], 'name of register')
|
||||||
|
desc = check_str(rd['desc'], 'desc for {} register'.format(name))
|
||||||
|
|
||||||
|
swaccess = SWAccess('{} register'.format(name),
|
||||||
|
rd.get('swaccess', 'none'))
|
||||||
|
hwaccess = HWAccess('{} register'.format(name),
|
||||||
|
rd.get('hwaccess', 'hro'))
|
||||||
|
|
||||||
|
hwext = check_bool(rd.get('hwext', False),
|
||||||
|
'hwext flag for {} register'.format(name))
|
||||||
|
|
||||||
|
hwqe = check_bool(rd.get('hwqe', False),
|
||||||
|
'hwqe flag for {} register'.format(name))
|
||||||
|
|
||||||
|
hwre = check_bool(rd.get('hwre', False),
|
||||||
|
'hwre flag for {} register'.format(name))
|
||||||
|
|
||||||
|
raw_regwen = rd.get('regwen', '')
|
||||||
|
if not raw_regwen:
|
||||||
|
regwen = None
|
||||||
|
else:
|
||||||
|
regwen = check_name(raw_regwen,
|
||||||
|
'regwen for {} register'.format(name))
|
||||||
|
|
||||||
|
tags = check_str_list(rd.get('tags', []),
|
||||||
|
'tags for {} register'.format(name))
|
||||||
|
|
||||||
|
raw_resval = rd.get('resval')
|
||||||
|
if raw_resval is None:
|
||||||
|
resval = None
|
||||||
|
else:
|
||||||
|
resval = check_int(raw_resval,
|
||||||
|
'resval for {} register'.format(name))
|
||||||
|
if not 0 <= resval < (1 << reg_width):
|
||||||
|
raise ValueError('resval for {} register is {}, '
|
||||||
|
'not an unsigned {}-bit number.'
|
||||||
|
.format(name, resval, reg_width))
|
||||||
|
|
||||||
|
shadowed = check_bool(rd.get('shadowed', False),
|
||||||
|
'shadowed flag for {} register'
|
||||||
|
.format(name))
|
||||||
|
|
||||||
|
raw_fields = check_list(rd['fields'],
|
||||||
|
'fields for {} register'.format(name))
|
||||||
|
if not raw_fields:
|
||||||
|
raise ValueError('Register {} has no fields.'.format(name))
|
||||||
|
fields = [Field.from_raw(name,
|
||||||
|
idx,
|
||||||
|
len(raw_fields),
|
||||||
|
swaccess,
|
||||||
|
hwaccess,
|
||||||
|
resval,
|
||||||
|
reg_width,
|
||||||
|
params,
|
||||||
|
rf)
|
||||||
|
for idx, rf in enumerate(raw_fields)]
|
||||||
|
|
||||||
|
raw_uea = rd.get('update_err_alert')
|
||||||
|
if raw_uea is None:
|
||||||
|
update_err_alert = None
|
||||||
|
else:
|
||||||
|
update_err_alert = check_name(raw_uea,
|
||||||
|
'update_err_alert for {} register'
|
||||||
|
.format(name))
|
||||||
|
|
||||||
|
raw_sea = rd.get('storage_err_alert')
|
||||||
|
if raw_sea is None:
|
||||||
|
storage_err_alert = None
|
||||||
|
else:
|
||||||
|
storage_err_alert = check_name(raw_sea,
|
||||||
|
'storage_err_alert for {} register'
|
||||||
|
.format(name))
|
||||||
|
|
||||||
|
return Register(offset, name, desc,
|
||||||
|
hwext, hwqe, hwre, regwen,
|
||||||
|
tags, resval, shadowed, fields,
|
||||||
|
update_err_alert, storage_err_alert)
|
||||||
|
|
||||||
|
def next_offset(self, addrsep: int) -> int:
|
||||||
|
return self.offset + addrsep
|
||||||
|
|
||||||
|
def get_n_bits(self, bittype: List[str]) -> int:
|
||||||
|
return sum(field.get_n_bits(self.hwext, self.hwqe, self.hwre, bittype)
|
||||||
|
for field in self.fields)
|
||||||
|
|
||||||
|
def get_field_list(self) -> List[Field]:
|
||||||
|
return self.fields
|
||||||
|
|
||||||
|
def is_homogeneous(self) -> bool:
|
||||||
|
return len(self.fields) == 1
|
||||||
|
|
||||||
|
def is_hw_writable(self) -> bool:
|
||||||
|
'''Returns true if any field in this register can be modified by HW'''
|
||||||
|
for fld in self.fields:
|
||||||
|
if fld.hwaccess.allows_write():
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_width(self) -> int:
|
||||||
|
'''Get the width of the fields in the register in bits
|
||||||
|
|
||||||
|
This counts dead space between and below fields, so it's calculated as
|
||||||
|
one more than the highest msb.
|
||||||
|
|
||||||
|
'''
|
||||||
|
# self.fields is ordered by (increasing) LSB, so we can find the MSB of
|
||||||
|
# the register by taking the MSB of the last field.
|
||||||
|
return 1 + self.fields[-1].bits.msb
|
||||||
|
|
||||||
|
def needs_we(self) -> bool:
|
||||||
|
'''Return true if at least one field needs a write-enable'''
|
||||||
|
for fld in self.fields:
|
||||||
|
if fld.swaccess.needs_we():
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def needs_re(self) -> bool:
|
||||||
|
'''Return true if at least one field needs a read-enable
|
||||||
|
|
||||||
|
This is true if any of the following are true:
|
||||||
|
|
||||||
|
- The register is shadowed (because shadow registers need to know
|
||||||
|
about reads)
|
||||||
|
|
||||||
|
- There's an RC field (where we'll attach the read-enable signal to
|
||||||
|
the subreg's we port)
|
||||||
|
|
||||||
|
- The register is hwext and allows reads (in which case the hardware
|
||||||
|
side might need the re signal)
|
||||||
|
|
||||||
|
'''
|
||||||
|
if self.shadowed:
|
||||||
|
return True
|
||||||
|
|
||||||
|
for fld in self.fields:
|
||||||
|
if fld.swaccess.key == 'rc':
|
||||||
|
return True
|
||||||
|
|
||||||
|
if self.hwext and fld.swaccess.allows_read():
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def make_multi(self,
|
||||||
|
reg_width: int,
|
||||||
|
offset: int,
|
||||||
|
creg_idx: int,
|
||||||
|
creg_count: int,
|
||||||
|
regwen_multi: bool,
|
||||||
|
compact: bool,
|
||||||
|
min_reg_idx: int,
|
||||||
|
max_reg_idx: int,
|
||||||
|
cname: str) -> 'Register':
|
||||||
|
'''Generate a numbered, packed version of the register'''
|
||||||
|
assert 0 <= creg_idx < creg_count
|
||||||
|
assert 0 <= min_reg_idx <= max_reg_idx
|
||||||
|
assert compact or (min_reg_idx == max_reg_idx)
|
||||||
|
|
||||||
|
new_name = ('{}_{}'.format(self.name, creg_idx)
|
||||||
|
if creg_count > 1
|
||||||
|
else self.name)
|
||||||
|
|
||||||
|
if self.regwen is None or not regwen_multi or creg_count == 1:
|
||||||
|
new_regwen = self.regwen
|
||||||
|
else:
|
||||||
|
new_regwen = '{}_{}'.format(self.regwen, creg_idx)
|
||||||
|
|
||||||
|
strip_field = creg_idx > 0
|
||||||
|
|
||||||
|
if compact:
|
||||||
|
# Compacting multiple registers into a single "compacted" register.
|
||||||
|
# This is only supported if we have exactly one field (checked at
|
||||||
|
# the call-site)
|
||||||
|
assert len(self.fields) == 1
|
||||||
|
new_fields = self.fields[0].make_multi(reg_width,
|
||||||
|
min_reg_idx, max_reg_idx,
|
||||||
|
cname, creg_idx,
|
||||||
|
strip_field)
|
||||||
|
else:
|
||||||
|
# No compacting going on, but we still choose to rename the fields
|
||||||
|
# to match the registers
|
||||||
|
assert creg_idx == min_reg_idx
|
||||||
|
new_fields = [field.make_suffixed('_{}'.format(creg_idx),
|
||||||
|
cname, creg_idx, strip_field)
|
||||||
|
for field in self.fields]
|
||||||
|
|
||||||
|
# Don't specify a reset value for the new register. Any reset value
|
||||||
|
# defined for the original register will have propagated to its fields,
|
||||||
|
# so when we combine them here, the Register constructor can compute a
|
||||||
|
# reset value for us (which might well be different from self.resval if
|
||||||
|
# we've replicated fields).
|
||||||
|
new_resval = None
|
||||||
|
|
||||||
|
return Register(offset, new_name, self.desc,
|
||||||
|
self.hwext, self.hwqe, self.hwre, new_regwen,
|
||||||
|
self.tags, new_resval, self.shadowed, new_fields,
|
||||||
|
self.update_err_alert, self.storage_err_alert)
|
||||||
|
|
||||||
|
def check_valid_regwen(self) -> None:
|
||||||
|
'''Check that this register is valid for use as a REGWEN'''
|
||||||
|
# A REGWEN register should have a single field that's just bit zero.
|
||||||
|
if len(self.fields) != 1:
|
||||||
|
raise ValueError('One or more registers use {} as a '
|
||||||
|
'write-enable so it should have exactly one '
|
||||||
|
'field. It actually has {}.'
|
||||||
|
.format(self.name, len(self.fields)))
|
||||||
|
|
||||||
|
wen_fld = self.fields[0]
|
||||||
|
if wen_fld.bits.width() != 1:
|
||||||
|
raise ValueError('One or more registers use {} as a '
|
||||||
|
'write-enable so its field should be 1 bit wide, '
|
||||||
|
'not {}.'
|
||||||
|
.format(self.name, wen_fld.bits.width()))
|
||||||
|
if wen_fld.bits.lsb != 0:
|
||||||
|
raise ValueError('One or more registers use {} as a '
|
||||||
|
'write-enable so its field should have LSB 0, '
|
||||||
|
'not {}.'
|
||||||
|
.format(self.name, wen_fld.bits.lsb))
|
||||||
|
|
||||||
|
# If the REGWEN bit is SW controlled, check that the register
|
||||||
|
# defaults to enabled. If this bit is read-only by SW and hence
|
||||||
|
# hardware controlled, we do not enforce this requirement.
|
||||||
|
if wen_fld.swaccess.key != "ro" and not self.resval:
|
||||||
|
raise ValueError('One or more registers use {} as a '
|
||||||
|
'write-enable. Since it is SW-controlled '
|
||||||
|
'it should have a nonzero reset value.'
|
||||||
|
.format(self.name))
|
||||||
|
|
||||||
|
if wen_fld.swaccess.key == "rw0c":
|
||||||
|
# The register is software managed: all good!
|
||||||
|
return
|
||||||
|
|
||||||
|
if wen_fld.swaccess.key == "ro" and wen_fld.hwaccess.key == "hwo":
|
||||||
|
# The register is hardware managed: that's fine too.
|
||||||
|
return
|
||||||
|
|
||||||
|
raise ValueError('One or more registers use {} as a write-enable. '
|
||||||
|
'However, its field has invalid access permissions '
|
||||||
|
'({} / {}). It should either have swaccess=RW0C '
|
||||||
|
'or have swaccess=RO and hwaccess=HWO.'
|
||||||
|
.format(self.name,
|
||||||
|
wen_fld.swaccess.key,
|
||||||
|
wen_fld.hwaccess.key))
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
rd = {
|
||||||
|
'name': self.name,
|
||||||
|
'desc': self.desc,
|
||||||
|
'fields': self.fields,
|
||||||
|
'hwext': str(self.hwext),
|
||||||
|
'hwqe': str(self.hwqe),
|
||||||
|
'hwre': str(self.hwre),
|
||||||
|
'tags': self.tags,
|
||||||
|
'shadowed': str(self.shadowed),
|
||||||
|
}
|
||||||
|
if self.regwen is not None:
|
||||||
|
rd['regwen'] = self.regwen
|
||||||
|
if self.update_err_alert is not None:
|
||||||
|
rd['update_err_alert'] = self.update_err_alert
|
||||||
|
if self.storage_err_alert is not None:
|
||||||
|
rd['storage_err_alert'] = self.storage_err_alert
|
||||||
|
|
||||||
|
return rd
|
|
@ -0,0 +1,63 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
from typing import Dict, Sequence
|
||||||
|
|
||||||
|
from .bits import Bits
|
||||||
|
from .lib import check_keys, check_name, check_str, check_int, check_list
|
||||||
|
|
||||||
|
|
||||||
|
class Signal:
|
||||||
|
def __init__(self, name: str, desc: str, bits: Bits):
|
||||||
|
self.name = name
|
||||||
|
self.desc = desc
|
||||||
|
self.bits = bits
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_raw(what: str, lsb: int, raw: object) -> 'Signal':
|
||||||
|
rd = check_keys(raw, what,
|
||||||
|
['name', 'desc'],
|
||||||
|
['width'])
|
||||||
|
|
||||||
|
name = check_name(rd['name'], 'name field of ' + what)
|
||||||
|
desc = check_str(rd['desc'], 'desc field of ' + what)
|
||||||
|
width = check_int(rd.get('width', 1), 'width field of ' + what)
|
||||||
|
if width <= 0:
|
||||||
|
raise ValueError('The width field of signal {} ({}) '
|
||||||
|
'has value {}, but should be positive.'
|
||||||
|
.format(name, what, width))
|
||||||
|
|
||||||
|
bits = Bits(lsb + width - 1, lsb)
|
||||||
|
|
||||||
|
return Signal(name, desc, bits)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_raw_list(what: str, raw: object) -> Sequence['Signal']:
|
||||||
|
lsb = 0
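# Signals are packed next to each other, starting at bit 0.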
|
||||||
|
ret = []
|
||||||
|
for idx, entry in enumerate(check_list(raw, what)):
|
||||||
|
entry_what = 'entry {} of {}'.format(idx, what)
|
||||||
|
interrupt = Signal.from_raw(entry_what, lsb, entry)
|
||||||
|
ret.append(interrupt)
|
||||||
|
lsb += interrupt.bits.width()
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
return {
|
||||||
|
'name': self.name,
|
||||||
|
'desc': self.desc,
|
||||||
|
'width': str(self.bits.width())
|
||||||
|
}
|
||||||
|
|
||||||
|
def as_nwt_dict(self, type_field: str) -> Dict[str, object]:
|
||||||
|
'''Return a view of the signal as a dictionary
|
||||||
|
|
||||||
|
The dictionary has fields "name", "width" and "type", the last
|
||||||
|
of which comes from the type_field argument. Used for topgen
|
||||||
|
integration.
|
||||||
|
|
||||||
|
'''
|
||||||
|
return {'name': self.name,
|
||||||
|
'width': self.bits.width(),
|
||||||
|
'type': type_field}
|
|
@ -0,0 +1,14 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
// UVM Registers auto-generated by `reggen` containing data structure
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## We use functions from uvm_reg_base.sv.tpl to define
|
||||||
|
## per-device-interface code.
|
||||||
|
##
|
||||||
|
<%namespace file="uvm_reg_base.sv.tpl" import="*"/>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
${make_ral_pkg(dv_base_prefix, block.regwidth, reg_block_path, rb, esc_if_name)}
|
|
@ -0,0 +1,564 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
<%!
|
||||||
|
from reggen import gen_dv
|
||||||
|
from reggen.access import HwAccess, SwRdAccess, SwWrAccess
|
||||||
|
from reggen.multi_register import MultiRegister
|
||||||
|
from reggen.register import Register
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
# Build a dict mapping instance names to registers.
# For a single register, return Dict[reg_inst:reg].
# For a multireg: if it is dv_compact, return Dict[mr.name[idx]:mr.reg];
# otherwise, return all of mr.regs keyed by their own names.
|
||||||
|
def get_inst_to_reg_dict(r) -> Dict:
|
||||||
|
inst_regs = {} # type: Dict[inst_name:Register]
|
||||||
|
if isinstance(r, MultiRegister):
|
||||||
|
if r.dv_compact:
|
||||||
|
inst_base = r.reg.name.lower()
|
||||||
|
for idx, reg in enumerate(r.regs):
|
||||||
|
inst_name = f'{inst_base}[{idx}]' if len(r.regs) > 1 else inst_base
|
||||||
|
inst_regs[inst_name] = reg
|
||||||
|
else:
|
||||||
|
for r0 in r.regs:
|
||||||
|
inst_regs[r0.name] = r0
|
||||||
|
else:
|
||||||
|
inst_regs[r.name.lower()] = r
|
||||||
|
return inst_regs
|
||||||
|
%>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## make_ral_pkg
|
||||||
|
## ============
|
||||||
|
##
|
||||||
|
## Generate the RAL package for a device interface.
|
||||||
|
##
|
||||||
|
## dv_base_prefix a string naming the base register type. If it is FOO,
|
||||||
|
## then we will inherit from FOO_reg (assumed to
|
||||||
|
## be a subclass of uvm_reg).
|
||||||
|
##
|
||||||
|
## reg_width an integer giving the width of registers in bits
|
||||||
|
##
|
||||||
|
## reg_block_path the hierarchical path to the relevant register block in the
|
||||||
|
## design
|
||||||
|
##
|
||||||
|
## rb a RegBlock object
|
||||||
|
##
|
||||||
|
## esc_if_name a string giving the full, escaped, interface name. For
|
||||||
|
## a device interface called FOO on block BAR,
|
||||||
|
## this will be bar__foo. For an unnamed interface
|
||||||
|
## on block BAR, this will be just bar.
|
||||||
|
##
|
||||||
|
<%def name="make_ral_pkg(dv_base_prefix, reg_width, reg_block_path, rb, esc_if_name)">\
|
||||||
|
package ${esc_if_name}_ral_pkg;
|
||||||
|
${make_ral_pkg_hdr(dv_base_prefix, [])}
|
||||||
|
|
||||||
|
${make_ral_pkg_fwd_decls(esc_if_name, rb.type_regs, rb.windows)}
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
<%
|
||||||
|
if isinstance(r, MultiRegister):
|
||||||
|
reg = r.reg
|
||||||
|
if r.dv_compact:
|
||||||
|
reg.fields = r.regs[0].fields
|
||||||
|
regs = [reg]
|
||||||
|
else:
|
||||||
|
regs = r.regs
|
||||||
|
else:
|
||||||
|
regs = [r]
|
||||||
|
%>\
|
||||||
|
% for reg in regs:
|
||||||
|
|
||||||
|
${make_ral_pkg_reg_class(dv_base_prefix, reg_width, esc_if_name, reg_block_path, reg)}
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
% for window in rb.windows:
|
||||||
|
|
||||||
|
${make_ral_pkg_window_class(dv_base_prefix, esc_if_name, window)}
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
<%
|
||||||
|
reg_block_name = gen_dv.bcname(esc_if_name)
|
||||||
|
%>\
|
||||||
|
class ${reg_block_name} extends ${dv_base_prefix}_reg_block;
|
||||||
|
% if rb.flat_regs:
|
||||||
|
// registers
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
<%
|
||||||
|
if isinstance(r, MultiRegister):
|
||||||
|
if r.dv_compact:
|
||||||
|
regs = [r.reg]
|
||||||
|
count = len(r.regs)
|
||||||
|
else:
|
||||||
|
regs = r.regs
|
||||||
|
count = 1
|
||||||
|
else:
|
||||||
|
regs = [r]
|
||||||
|
count = 1
|
||||||
|
%>\
|
||||||
|
% for r0 in regs:
|
||||||
|
<%
|
||||||
|
reg_type = gen_dv.rcname(esc_if_name, r0)
|
||||||
|
inst_name = r0.name.lower()
|
||||||
|
inst_decl = f'{inst_name}[{count}]' if count > 1 else inst_name
|
||||||
|
%>\
|
||||||
|
rand ${reg_type} ${inst_decl};
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
% if rb.windows:
|
||||||
|
// memories
|
||||||
|
% for window in rb.windows:
|
||||||
|
rand ${gen_dv.mcname(esc_if_name, window)} ${gen_dv.miname(window)};
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
|
||||||
|
`uvm_object_utils(${reg_block_name})
|
||||||
|
|
||||||
|
function new(string name = "${reg_block_name}",
|
||||||
|
int has_coverage = UVM_NO_COVERAGE);
|
||||||
|
super.new(name, has_coverage);
|
||||||
|
endfunction : new
|
||||||
|
|
||||||
|
virtual function void build(uvm_reg_addr_t base_addr,
|
||||||
|
csr_excl_item csr_excl = null);
|
||||||
|
// create default map
|
||||||
|
this.default_map = create_map(.name("default_map"),
|
||||||
|
.base_addr(base_addr),
|
||||||
|
.n_bytes(${reg_width//8}),
|
||||||
|
.endian(UVM_LITTLE_ENDIAN));
|
||||||
|
if (csr_excl == null) begin
|
||||||
|
csr_excl = csr_excl_item::type_id::create("csr_excl");
|
||||||
|
this.csr_excl = csr_excl;
|
||||||
|
end
|
||||||
|
% if rb.flat_regs:
|
||||||
|
set_hdl_path_root("tb.dut", "BkdrRegPathRtl");
|
||||||
|
set_hdl_path_root("tb.dut", "BkdrRegPathRtlCommitted");
|
||||||
|
set_hdl_path_root("tb.dut", "BkdrRegPathRtlShadow");
|
||||||
|
// create registers
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
<%
|
||||||
|
r0 = r.reg if isinstance(r, MultiRegister) else r
|
||||||
|
reg_type = gen_dv.rcname(esc_if_name, r0)
|
||||||
|
%>\
|
||||||
|
% if isinstance(r, MultiRegister):
|
||||||
|
% for idx, reg in enumerate(r.regs):
|
||||||
|
<%
|
||||||
|
if r.dv_compact:
|
||||||
|
inst_base = r0.name.lower()
|
||||||
|
inst_name = f'{inst_base}[{idx}]' if len(r.regs) > 1 else inst_base
|
||||||
|
else:
|
||||||
|
inst_name = reg.name.lower()
|
||||||
|
reg_type = gen_dv.rcname(esc_if_name, reg)
|
||||||
|
%>\
|
||||||
|
${instantiate_register(reg_width, reg_block_path, reg, reg_type, inst_name)}\
|
||||||
|
% endfor
|
||||||
|
% else:
|
||||||
|
${instantiate_register(reg_width, reg_block_path, r, reg_type, r.name.lower())}\
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
<%
|
||||||
|
any_regwen = False
|
||||||
|
for r in rb.flat_regs:
|
||||||
|
if r.regwen:
|
||||||
|
any_regwen = True
|
||||||
|
break
|
||||||
|
%>\
|
||||||
|
% if any_regwen:
|
||||||
|
// assign locked reg to its regwen reg
|
||||||
|
% for r in rb.all_regs:
|
||||||
|
% for inst, reg in get_inst_to_reg_dict(r).items():
|
||||||
|
${apply_regwen(rb, reg, inst)}\
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
% endif
|
||||||
|
${make_ral_pkg_window_instances(reg_width, esc_if_name, rb)}
|
||||||
|
endfunction : build
|
||||||
|
endclass : ${reg_block_name}
|
||||||
|
|
||||||
|
endpackage
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## make_ral_pkg_hdr
|
||||||
|
## ================
|
||||||
|
##
|
||||||
|
## Generate the header for a RAL package
|
||||||
|
##
|
||||||
|
## dv_base_prefix as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## deps a list of names for packages that should be explicitly
|
||||||
|
## imported
|
||||||
|
##
|
||||||
|
<%def name="make_ral_pkg_hdr(dv_base_prefix, deps)">\
|
||||||
|
// dep packages
|
||||||
|
import uvm_pkg::*;
|
||||||
|
import dv_base_reg_pkg::*;
|
||||||
|
% if dv_base_prefix != "dv_base":
|
||||||
|
import ${dv_base_prefix}_reg_pkg::*;
|
||||||
|
% endif
|
||||||
|
% for dep in deps:
|
||||||
|
import ${dep}::*;
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// macro includes
|
||||||
|
`include "uvm_macros.svh"\
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## make_ral_pkg_fwd_decls
|
||||||
|
## ======================
|
||||||
|
##
|
||||||
|
## Generate the forward declarations for a RAL package
|
||||||
|
##
|
||||||
|
## esc_if_name as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## type_regs a list of Register objects, one for each type that
|
||||||
|
## should be defined. Each MultiRegister will contribute
|
||||||
|
## just one register to the list.
|
||||||
|
##
|
||||||
|
## windows a list of Window objects
|
||||||
|
##
|
||||||
|
<%def name="make_ral_pkg_fwd_decls(esc_if_name, type_regs, windows)">\
|
||||||
|
// Forward declare all register/memory/block classes
|
||||||
|
% for r in type_regs:
|
||||||
|
typedef class ${gen_dv.rcname(esc_if_name, r)};
|
||||||
|
% endfor
|
||||||
|
% for w in windows:
|
||||||
|
typedef class ${gen_dv.mcname(esc_if_name, w)};
|
||||||
|
% endfor
|
||||||
|
typedef class ${gen_dv.bcname(esc_if_name)};\
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## make_ral_pkg_reg_class
|
||||||
|
## ======================
|
||||||
|
##
|
||||||
|
## Generate the classes for a register inside a RAL package
|
||||||
|
##
|
||||||
|
## dv_base_prefix as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## reg_width as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## esc_if_name as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## reg_block_path as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## reg a Register or MultiRegister object
|
||||||
|
<%def name="make_ral_pkg_reg_class(dv_base_prefix, reg_width, esc_if_name, reg_block_path, reg)">\
|
||||||
|
<%
|
||||||
|
reg_name = reg.name.lower()
|
||||||
|
|
||||||
|
is_ext = reg.hwext
|
||||||
|
for field in reg.fields:
|
||||||
|
if (field.hwaccess.value[1] == HwAccess.NONE and
|
||||||
|
field.swaccess.swrd() == SwRdAccess.RD and
|
||||||
|
not field.swaccess.allows_write()):
|
||||||
|
is_ext = 1
|
||||||
|
|
||||||
|
class_name = gen_dv.rcname(esc_if_name, reg)
|
||||||
|
%>\
|
||||||
|
class ${class_name} extends ${dv_base_prefix}_reg;
|
||||||
|
// fields
|
||||||
|
% for f in reg.fields:
|
||||||
|
rand ${dv_base_prefix}_reg_field ${f.name.lower()};
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
`uvm_object_utils(${class_name})
|
||||||
|
|
||||||
|
function new(string name = "${class_name}",
|
||||||
|
int unsigned n_bits = ${reg_width},
|
||||||
|
int has_coverage = UVM_NO_COVERAGE);
|
||||||
|
super.new(name, n_bits, has_coverage);
|
||||||
|
endfunction : new
|
||||||
|
|
||||||
|
virtual function void build(csr_excl_item csr_excl = null);
|
||||||
|
// create fields
|
||||||
|
% for field in reg.fields:
|
||||||
|
<%
|
||||||
|
if len(reg.fields) == 1:
|
||||||
|
reg_field_name = reg_name
|
||||||
|
else:
|
||||||
|
reg_field_name = reg_name + "_" + field.name.lower()
|
||||||
|
%>\
|
||||||
|
${_create_reg_field(dv_base_prefix, reg_width, reg_block_path, reg.shadowed, reg.hwext, reg_field_name, field)}
|
||||||
|
% endfor
|
||||||
|
% if is_ext:
|
||||||
|
set_is_ext_reg(1);
|
||||||
|
% endif
|
||||||
|
endfunction : build
|
||||||
|
endclass : ${class_name}\
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## _create_reg_field
|
||||||
|
## =================
|
||||||
|
##
|
||||||
|
## Generate the code that creates a uvm_reg_field object for a field
|
||||||
|
## in a register.
|
||||||
|
##
|
||||||
|
## dv_base_prefix as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## reg_width as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## reg_block_path as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## shadowed true if the field's register is shadowed
|
||||||
|
##
|
||||||
|
## hwext true if the field's register is hwext
|
||||||
|
##
|
||||||
|
## reg_field_name a string with the name to give the field in the HDL
|
||||||
|
##
|
||||||
|
## field a Field object
|
||||||
|
<%def name="_create_reg_field(dv_base_prefix, reg_width, reg_block_path, shadowed, hwext, reg_field_name, field)">\
|
||||||
|
<%
|
||||||
|
field_size = field.bits.width()
|
||||||
|
field_access = field.swaccess.dv_rights()
|
||||||
|
|
||||||
|
if not field.hwaccess.allows_write():
|
||||||
|
field_volatile = 0
|
||||||
|
else:
|
||||||
|
field_volatile = 1
|
||||||
|
field_tags = field.tags
|
||||||
|
|
||||||
|
fname = field.name.lower()
|
||||||
|
type_id_indent = ' ' * (len(fname) + 4)
|
||||||
|
%>\
|
||||||
|
${fname} = (${dv_base_prefix}_reg_field::
|
||||||
|
${type_id_indent}type_id::create("${fname}"));
|
||||||
|
${fname}.configure(
|
||||||
|
.parent(this),
|
||||||
|
.size(${field_size}),
|
||||||
|
.lsb_pos(${field.bits.lsb}),
|
||||||
|
.access("${field_access}"),
|
||||||
|
.volatile(${field_volatile}),
|
||||||
|
.reset(${reg_width}'h${format(field.resval or 0, 'x')}),
|
||||||
|
.has_reset(1),
|
||||||
|
.is_rand(1),
|
||||||
|
.individually_accessible(1));
|
||||||
|
|
||||||
|
${fname}.set_original_access("${field_access}");
|
||||||
|
% if field_tags:
|
||||||
|
// create field tags
|
||||||
|
% for field_tag in field_tags:
|
||||||
|
<%
|
||||||
|
tag = field_tag.split(":")
|
||||||
|
%>\
|
||||||
|
% if tag[0] == "excl":
|
||||||
|
csr_excl.add_excl(${field.name.lower()}.get_full_name(), ${tag[2]}, ${tag[1]});
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## make_ral_pkg_window_class
|
||||||
|
## =========================
|
||||||
|
##
|
||||||
|
## Generate the classes for a window inside a RAL package
|
||||||
|
##
|
||||||
|
## dv_base_prefix as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## esc_if_name as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## window a Window object
|
||||||
|
<%def name="make_ral_pkg_window_class(dv_base_prefix, esc_if_name, window)">\
|
||||||
|
<%
|
||||||
|
mem_name = window.name.lower()
|
||||||
|
mem_right = window.swaccess.dv_rights()
|
||||||
|
mem_n_bits = window.validbits
|
||||||
|
mem_size = window.items
|
||||||
|
|
||||||
|
class_name = gen_dv.mcname(esc_if_name, window)
|
||||||
|
%>\
|
||||||
|
class ${class_name} extends ${dv_base_prefix}_mem;
|
||||||
|
|
||||||
|
`uvm_object_utils(${class_name})
|
||||||
|
|
||||||
|
function new(string name = "${class_name}",
|
||||||
|
longint unsigned size = ${mem_size},
|
||||||
|
int unsigned n_bits = ${mem_n_bits},
|
||||||
|
string access = "${mem_right}",
|
||||||
|
int has_coverage = UVM_NO_COVERAGE);
|
||||||
|
super.new(name, size, n_bits, access, has_coverage);
|
||||||
|
% if window.byte_write:
|
||||||
|
set_mem_partial_write_support(1);
|
||||||
|
% endif
|
||||||
|
endfunction : new
|
||||||
|
|
||||||
|
endclass : ${class_name}
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## make_ral_pkg_window_instances
|
||||||
|
## =============================
|
||||||
|
##
|
||||||
|
## Generate the classes for a window inside a RAL package
|
||||||
|
##
|
||||||
|
## reg_width as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## esc_if_name as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## rb a RegBlock object
|
||||||
|
##
|
||||||
|
<%def name="make_ral_pkg_window_instances(reg_width, esc_if_name, rb)">\
|
||||||
|
% if rb.windows:
|
||||||
|
|
||||||
|
// create memories
|
||||||
|
% for w in rb.windows:
|
||||||
|
<%
|
||||||
|
mem_name = w.name.lower()
|
||||||
|
mem_right = w.swaccess.dv_rights()
|
||||||
|
mem_offset = "{}'h{:x}".format(reg_width, w.offset)
|
||||||
|
mem_n_bits = w.validbits
|
||||||
|
mem_size = w.items
|
||||||
|
%>\
|
||||||
|
${mem_name} = ${gen_dv.mcname(esc_if_name, w)}::type_id::create("${mem_name}");
|
||||||
|
${mem_name}.configure(.parent(this));
|
||||||
|
default_map.add_mem(.mem(${mem_name}),
|
||||||
|
.offset(${mem_offset}),
|
||||||
|
.rights("${mem_right}"));
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## instantiate_register
|
||||||
|
## ====================
|
||||||
|
##
|
||||||
|
## Actually instantiate a register in a register block
|
||||||
|
##
|
||||||
|
## reg_width an integer giving the width of registers in bits
|
||||||
|
##
|
||||||
|
## reg_block_path as for make_ral_pkg
|
||||||
|
##
|
||||||
|
## reg the Register to instantiate
|
||||||
|
##
|
||||||
|
## reg_type a string giving the type name (a subclass of
|
||||||
|
## uvm_register) to instantiate.
|
||||||
|
##
|
||||||
|
## reg_inst a string giving the field of the uvm_reg_block that
|
||||||
|
## should be set to this new register. For single
|
||||||
|
## registers, this will just be the register name. For
|
||||||
|
## elements of multi-registers, it will be the name of an
|
||||||
|
## array item.
|
||||||
|
##
|
||||||
|
<%def name="instantiate_register(reg_width, reg_block_path, reg, reg_type, reg_inst)">\
|
||||||
|
<%
|
||||||
|
reg_name = reg.name.lower()
|
||||||
|
reg_offset = "{}'h{:x}".format(reg_width, reg.offset)
|
||||||
|
|
||||||
|
inst_id_indent = ' ' * (len(reg_inst) + 4)
|
||||||
|
%>\
|
||||||
|
${reg_inst} = (${reg_type}::
|
||||||
|
${inst_id_indent}type_id::create("${reg_name}"));
|
||||||
|
${reg_inst}.configure(.blk_parent(this));
|
||||||
|
${reg_inst}.build(csr_excl);
|
||||||
|
default_map.add_reg(.rg(${reg_inst}),
|
||||||
|
.offset(${reg_offset}));
|
||||||
|
% if reg.shadowed and reg.hwext:
|
||||||
|
<%
|
||||||
|
shadowed_reg_path = ''
|
||||||
|
for tag in reg.tags:
|
||||||
|
parts = tag.split(':')
|
||||||
|
if parts[0] == 'shadowed_reg_path':
|
||||||
|
shadowed_reg_path = parts[1]
|
||||||
|
|
||||||
|
if not shadowed_reg_path:
|
||||||
|
print("ERROR: ext shadow_reg does not have tags for shadowed_reg_path!")
|
||||||
|
assert 0
|
||||||
|
|
||||||
|
bit_idx = reg.fields[-1].bits.msb + 1
|
||||||
|
|
||||||
|
%>\
|
||||||
|
${reg_inst}.add_update_err_alert("${reg.update_err_alert}");
|
||||||
|
${reg_inst}.add_storage_err_alert("${reg.storage_err_alert}");
|
||||||
|
${reg_inst}.add_hdl_path_slice(
|
||||||
|
"${shadowed_reg_path}.committed_reg.q",
|
||||||
|
0, ${bit_idx}, 0, "BkdrRegPathRtlCommitted");
|
||||||
|
${reg_inst}.add_hdl_path_slice(
|
||||||
|
"${shadowed_reg_path}.shadow_reg.q",
|
||||||
|
0, ${bit_idx}, 0, "BkdrRegPathRtlShadow");
|
||||||
|
% endif
|
||||||
|
% for field in reg.fields:
|
||||||
|
<%
|
||||||
|
field_size = field.bits.width()
|
||||||
|
if len(reg.fields) == 1:
|
||||||
|
reg_field_name = reg_name
|
||||||
|
else:
|
||||||
|
reg_field_name = reg_name + "_" + field.name.lower()
|
||||||
|
%>\
|
||||||
|
% if ((field.hwaccess.value[1] == HwAccess.NONE and\
|
||||||
|
field.swaccess.swrd() == SwRdAccess.RD and\
|
||||||
|
not field.swaccess.allows_write())):
|
||||||
|
// constant reg
|
||||||
|
${reg_inst}.add_hdl_path_slice(
|
||||||
|
"${reg_block_path}.${reg_field_name}_qs",
|
||||||
|
${field.bits.lsb}, ${field_size}, 0, "BkdrRegPathRtl");
|
||||||
|
% else:
|
||||||
|
${reg_inst}.add_hdl_path_slice(
|
||||||
|
"${reg_block_path}.u_${reg_field_name}.q${"s" if reg.hwext else ""}",
|
||||||
|
${field.bits.lsb}, ${field_size}, 0, "BkdrRegPathRtl");
|
||||||
|
% endif
|
||||||
|
% if shadowed and not hwext:
|
||||||
|
${reg_inst}.add_hdl_path_slice(
|
||||||
|
"${reg_block_path}.u_${reg_field_name}.committed_reg.q",
|
||||||
|
${field.bits.lsb}, ${field_size}, 0, "BkdrRegPathRtlCommitted");
|
||||||
|
${reg_inst}.add_hdl_path_slice(
|
||||||
|
"${reg_block_path}.u_${reg_field_name}.shadow_reg.q",
|
||||||
|
${field.bits.lsb}, ${field_size}, 0, "BkdrRegPathRtlShadow");
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% if reg.shadowed:
|
||||||
|
${reg_inst}.set_is_shadowed();
|
||||||
|
% endif
|
||||||
|
% if reg.tags:
|
||||||
|
// create register tags
|
||||||
|
% for reg_tag in reg.tags:
|
||||||
|
<%
|
||||||
|
tag = reg_tag.split(":")
|
||||||
|
%>\
|
||||||
|
% if tag[0] == "excl":
|
||||||
|
csr_excl.add_excl(${reg_inst}.get_full_name(), ${tag[2]}, ${tag[1]});
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
</%def>\
|
||||||
|
##
|
||||||
|
##
|
||||||
|
## apply_regwen
|
||||||
|
## ============
|
||||||
|
##
|
||||||
|
## Apply a regwen to a register
|
||||||
|
##
|
||||||
|
## rb the register block
|
||||||
|
##
|
||||||
|
## reg the Register that needs apply regwens
|
||||||
|
##
|
||||||
|
## reg_inst a string giving the field of the uvm_reg_block that
|
||||||
|
## should be updated. For single registers, this will just
|
||||||
|
## be the register name. For elements of multi-registers,
|
||||||
|
## it will be the name of an array item.
|
||||||
|
##
|
||||||
|
<%def name="apply_regwen(rb, reg, reg_inst)">\
|
||||||
|
% if reg.regwen is None:
|
||||||
|
<% return "" %>\
|
||||||
|
% endif
|
||||||
|
% for wen in rb.all_regs:
|
||||||
|
% for wen_inst, wen_reg in get_inst_to_reg_dict(wen).items():
|
||||||
|
% if reg.regwen.lower() == wen_reg.name.lower():
|
||||||
|
${wen_inst}.add_lockable_reg_or_fld(${reg_inst});
|
||||||
|
<% return "" %>\
|
||||||
|
% elif wen_reg.name.lower() in reg.regwen.lower():
|
||||||
|
% for field in wen_reg.get_field_list():
|
||||||
|
% if reg.regwen.lower() == (wen_reg.name.lower() + "_" + field.name.lower()):
|
||||||
|
${wen_inst}.${field.name.lower()}.add_lockable_reg_or_fld(${reg_inst});
|
||||||
|
<% return "" %>\
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
</%def>\
|
|
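A note on the `get_inst_to_reg_dict` helper above: for a compact multiregister it names the instances `base[idx]`, while a non-compact one keeps each replicated register's own name. A simplified, hypothetical sketch of just that naming scheme (plain strings stand in for the reggen `Register` objects):

```python
# Illustrative only: the instance names produced for a compact multireg
# with three replications (the names here are hypothetical).
base_name = 'intr_state'
num_copies = 3

inst_regs = {}
for idx in range(num_copies):
    inst_name = f'{base_name}[{idx}]' if num_copies > 1 else base_name
    inst_regs[inst_name] = f'{base_name}_{idx}'

print(list(inst_regs))  # ['intr_state[0]', 'intr_state[1]', 'intr_state[2]']
```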
@ -0,0 +1,167 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
"""
Register JSON validation
"""

import logging as log
from typing import Dict, List, Tuple, Union


# validating version of int(x, 0)
# returns int value, error flag
# if error flag is True value will be zero
def check_int(x: Union[int, str],
              err_prefix: str,
              suppress_err_msg: bool = False) -> Tuple[int, bool]:
    if isinstance(x, int):
        return x, False
    if x[0] == '0' and len(x) > 2:
        if x[1] in 'bB':
            validch = '01'
        elif x[1] in 'oO':
            validch = '01234567'
        elif x[1] in 'xX':
            validch = '0123456789abcdefABCDEF'
        else:
            if not suppress_err_msg:
                log.error(err_prefix +
                          ": int must start digit, 0b, 0B, 0o, 0O, 0x or 0X")
            return 0, True
        for c in x[2:]:
            if c not in validch:
                if not suppress_err_msg:
                    log.error(err_prefix + ": Bad character " + c + " in " + x)
                return 0, True
    else:
        if not x.isdecimal():
            if not suppress_err_msg:
                log.error(err_prefix + ": Number not valid int " + x)
            return 0, True
    return int(x, 0), False


def check_bool(x: Union[bool, str], err_prefix: str) -> Tuple[bool, bool]:
    """check_bool checks if input 'x' is one of the list:
    "true", "false"

    It returns value as Bool type and Error condition.
    """
    if isinstance(x, bool):
        # if Bool returns as it is
        return x, False
    if not x.lower() in ["true", "false"]:
        log.error(err_prefix + ": Bad field value " + x)
        return False, True
    else:
        return (x.lower() == "true"), False


def check_ln(obj: Dict[str, object],
             x: str,
             withwidth: bool,
             err_prefix: str) -> int:
    error = 0
    entry = obj[x]
    if not isinstance(entry, list):
        log.error(err_prefix + ' element ' + x + ' not a list')
        return 1

    for y in entry:
        error += check_keys(y, ln_required, ln_optional if withwidth else {},
                            {}, err_prefix + ' element ' + x)
        if withwidth:
            if 'width' in y:
                w, err = check_int(y['width'], err_prefix + ' width in ' + x)
                if err:
                    error += 1
                    w = 1
            else:
                w = 1
            y['width'] = str(w)

    return error


def check_keys(obj: Dict[str, object],
               required_keys: Dict[str, List[str]],
               optional_keys: Dict[str, List[str]],
               added_keys: Dict[str, List[str]],
               err_prefix: str) -> int:
    error = 0
    for x in required_keys:
        if x not in obj:
            error += 1
            log.error(err_prefix + " missing required key " + x)
    for x in obj:
        type = None
        if x in required_keys:
            type = required_keys[x][0]
        elif x in optional_keys:
            type = optional_keys[x][0]
        elif x not in added_keys:
            log.warning(err_prefix + " contains extra key " + x)
        if type is not None:
            if type[:2] == 'ln':
                error += check_ln(obj, x, type == 'lnw', err_prefix)

    return error


val_types = {
    'd': ["int", "integer (binary 0b, octal 0o, decimal, hex 0x)"],
    'x': ["xint", "x for undefined otherwise int"],
    'b': [
        "bitrange", "bit number as decimal integer, "
        "or bit-range as decimal integers msb:lsb"
    ],
    'l': ["list", "comma separated list enclosed in `[]`"],
    'ln': [
        "name list", 'comma separated list enclosed in `[]` of '
        'one or more groups that have just name and dscr keys.'
        ' e.g. `{ name: "name", desc: "description"}`'
    ],
    'lnw': ["name list+", 'name list that optionally contains a width'],
    'lp': ["parameter list", 'parameter list having default value optionally'],
    'g': ["group", "comma separated group of key:value enclosed in `{}`"],
    'lg': [
        "list of group", "comma separated group of key:value enclosed in `{}`"
        " the second entry of the list is the sub group format"
    ],
    's': ["string", "string, typically short"],
    't': [
        "text", "string, may be multi-line enclosed in `'''` "
        "may use `**bold**`, `*italic*` or `!!Reg` markup"
    ],
    'T': ["tuple", "tuple enclosed in ()"],
    'pi': ["python int", "Native Python type int (generated)"],
    'pb': ["python Bool", "Native Python type Bool (generated)"],
    'pl': ["python list", "Native Python type list (generated)"],
    'pe': ["python enum", "Native Python type enum (generated)"]
}

# ln type has list of groups with only name and description
# (was called "subunit" in cfg_validate)
ln_required = {
    'name': ['s', "name of the item"],
    'desc': ['s', "description of the item"],
}
ln_optional = {
    'width': ['d', "bit width of the item (if not 1)"],
}

# Registers list may have embedded keys
list_optone = {
    'reserved': ['d', "number of registers to reserve space for"],
    'skipto': ['d', "set next register offset to value"],
    'window': [
        'g', "group defining an address range "
        "for something other than standard registers"
    ],
    'multireg':
    ['g', "group defining registers generated "
     "from a base instance."]
}

key_use = {'r': "required", 'o': "optional", 'a': "added by tool"}

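The `check_int` helper above reports problems through a `(value, error)` pair instead of raising, which lets callers count errors and keep validating. A simplified, self-contained stand-in for that convention (not the reggen implementation itself):

```python
# Simplified sketch of the (value, error-flag) convention used by check_int.
from typing import Tuple, Union


def to_int(x: Union[int, str]) -> Tuple[int, bool]:
    """Return (value, err); err is True and value is 0 for a bad input."""
    if isinstance(x, int):
        return x, False
    try:
        return int(x, 0), False
    except ValueError:
        return 0, True


print(to_int(16))      # (16, False)
print(to_int('0x10'))  # (16, False)
print(to_int('0b12'))  # (0, True): '2' is not a binary digit
```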
@ -0,0 +1,25 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
r"""Standard version printing
"""
import os
import subprocess
import sys
from typing import List

import pkg_resources  # part of setuptools


def show_and_exit(clitool: str, packages: List[str]) -> None:
    util_path = os.path.dirname(os.path.realpath(clitool))
    os.chdir(util_path)
    ver = subprocess.run(
        ["git", "describe", "--always", "--dirty", "--broken"],
        stdout=subprocess.PIPE).stdout.strip().decode('ascii')
    if (ver == ''):
        ver = 'not found (not in Git repository?)'
    sys.stderr.write(clitool + " Git version " + ver + '\n')
    for p in packages:
        sys.stderr.write(p + ' ' + pkg_resources.require(p)[0].version + '\n')
    exit(0)

@ -0,0 +1,169 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

from typing import Dict

from .access import SWAccess
from .lib import check_keys, check_str, check_bool, check_int
from .params import ReggenParams


REQUIRED_FIELDS = {
    'name': ['s', "name of the window"],
    'desc': ['t', "description of the window"],
    'items': ['d', "size in fieldaccess width words of the window"],
    'swaccess': ['s', "software access permitted"],
}

# TODO potential for additional optional to give more type info?
# eg sram-hw-port: "none", "sync", "async"
OPTIONAL_FIELDS = {
    'data-intg-passthru': [
        's', "True if the window has data integrity pass through. "
        "Defaults to false if not present."
    ],
    'byte-write': [
        's', "True if byte writes are supported. "
        "Defaults to false if not present."
    ],
    'validbits': [
        'd', "Number of valid data bits within "
        "regwidth sized word. "
        "Defaults to regwidth. If "
        "smaller than the regwidth then in each "
        "word of the window bits "
        "[regwidth-1:validbits] are unused and "
        "bits [validbits-1:0] are valid."
    ],
    'unusual': [
        's', "True if window has unusual parameters "
        "(set to prevent Unusual: errors)."
        "Defaults to false if not present."
    ]
}


class Window:
    '''A class representing a memory window'''
    def __init__(self,
                 name: str,
                 desc: str,
                 unusual: bool,
                 byte_write: bool,
                 data_intg_passthru: bool,
                 validbits: int,
                 items: int,
                 size_in_bytes: int,
                 offset: int,
                 swaccess: SWAccess):
        assert 0 < validbits
        assert 0 < items <= size_in_bytes

        self.name = name
        self.desc = desc
        self.unusual = unusual
        self.byte_write = byte_write
        self.data_intg_passthru = data_intg_passthru
        self.validbits = validbits
        self.items = items
        self.size_in_bytes = size_in_bytes
        self.offset = offset
        self.swaccess = swaccess

        # Check that offset has been adjusted so that the first item in the
        # window has all zeros in the low bits.
        po2_size = 1 << (self.size_in_bytes - 1).bit_length()
        assert not (offset & (po2_size - 1))

    @staticmethod
    def from_raw(offset: int,
                 reg_width: int,
                 params: ReggenParams,
                 raw: object) -> 'Window':
        rd = check_keys(raw, 'window',
                        list(REQUIRED_FIELDS.keys()),
                        list(OPTIONAL_FIELDS.keys()))

        wind_desc = 'window at offset {:#x}'.format(offset)
        name = check_str(rd['name'], wind_desc)
        wind_desc = '{!r} {}'.format(name, wind_desc)

        desc = check_str(rd['desc'], 'desc field for ' + wind_desc)

        unusual = check_bool(rd.get('unusual', False),
                             'unusual field for ' + wind_desc)
        byte_write = check_bool(rd.get('byte-write', False),
                                'byte-write field for ' + wind_desc)
        data_intg_passthru = check_bool(rd.get('data-intg-passthru', False),
                                        'data-intg-passthru field for ' + wind_desc)

        validbits = check_int(rd.get('validbits', reg_width),
                              'validbits field for ' + wind_desc)
        if validbits <= 0:
            raise ValueError('validbits field for {} is not positive.'
                             .format(wind_desc))
        if validbits > reg_width:
            raise ValueError('validbits field for {} is {}, '
                             'which is greater than {}, the register width.'
                             .format(wind_desc, validbits, reg_width))

        r_items = check_str(rd['items'], 'items field for ' + wind_desc)
        items = params.expand(r_items, 'items field for ' + wind_desc)
        if items <= 0:
            raise ValueError("Items field for {} is {}, "
                             "which isn't positive."
                             .format(wind_desc, items))

        assert reg_width % 8 == 0
        size_in_bytes = items * (reg_width // 8)

        # Round size_in_bytes up to the next power of 2. The calculation is
        # like clog2 calculations in SystemVerilog, where we start with the
        # last index, rather than the number of elements.
        assert size_in_bytes > 0
        po2_size = 1 << (size_in_bytes - 1).bit_length()

        # A size that isn't a power of 2 is not allowed unless the unusual flag
        # is set.
        if po2_size != size_in_bytes and not unusual:
            raise ValueError('Items field for {} is {}, which gives a size of '
                             '{} bytes. This is not a power of 2 (next power '
                             'of 2 is {}). If you want to do this even so, '
                             'set the "unusual" flag.'
                             .format(wind_desc, items,
                                     size_in_bytes, po2_size))

        # Adjust offset if necessary to make sure the base address of the first
        # item in the window has all zeros in the low bits.
        addr_mask = po2_size - 1
        if offset & addr_mask:
            offset = (offset | addr_mask) + 1
        offset = offset

        swaccess = SWAccess(wind_desc, rd['swaccess'])
        if not (swaccess.value[4] or unusual):
            raise ValueError('swaccess field for {} is {}, which is an '
                             'unusual access type for a window. If you want '
                             'to do this, set the "unusual" flag.'
                             .format(wind_desc, swaccess.key))

        return Window(name, desc, unusual, byte_write, data_intg_passthru,
                      validbits, items, size_in_bytes, offset, swaccess)

    def next_offset(self, addrsep: int) -> int:
        return self.offset + self.size_in_bytes

    def _asdict(self) -> Dict[str, object]:
        rd = {
            'desc': self.desc,
            'items': self.items,
            'swaccess': self.swaccess.key,
            'byte-write': self.byte_write,
            'validbits': self.validbits,
            'unusual': self.unusual
        }
        if self.name is not None:
            rd['name'] = self.name

        return {'window': rd}

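The size and offset arithmetic in `Window.from_raw` is easiest to see with concrete numbers: the byte size is rounded up to the next power of two, and an unaligned offset is then bumped to the next multiple of that rounded size. A short worked sketch with made-up values:

```python
# Worked example of the rounding in Window.from_raw (illustrative values).
items = 24          # hypothetical 'items' value
reg_width = 32      # register width in bits

size_in_bytes = items * (reg_width // 8)          # 96 bytes
po2_size = 1 << (size_in_bytes - 1).bit_length()  # rounds up to 128

# Align the window base so its low address bits are all zero.
offset = 0x130                                    # hypothetical unaligned offset
addr_mask = po2_size - 1
if offset & addr_mask:
    offset = (offset | addr_mask) + 1

print(size_in_bytes, po2_size, hex(offset))       # 96 128 0x180
```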
@ -0,0 +1,235 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
r"""Command-line tool to validate and convert register hjson
|
||||||
|
|
||||||
|
"""
|
||||||
|
import argparse
|
||||||
|
import logging as log
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pathlib import PurePath
|
||||||
|
|
||||||
|
from reggen import (gen_cheader, gen_dv, gen_fpv, gen_html,
|
||||||
|
gen_json, gen_rtl, gen_selfdoc, version)
|
||||||
|
from reggen.ip_block import IpBlock
|
||||||
|
|
||||||
|
DESC = """regtool, generate register info from Hjson source"""
|
||||||
|
|
||||||
|
USAGE = '''
|
||||||
|
regtool [options]
|
||||||
|
regtool [options] <input>
|
||||||
|
regtool (-h | --help)
|
||||||
|
regtool (-V | --version)
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
verbose = 0
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
prog="regtool",
|
||||||
|
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||||
|
usage=USAGE,
|
||||||
|
description=DESC)
|
||||||
|
parser.add_argument('input',
|
||||||
|
nargs='?',
|
||||||
|
metavar='file',
|
||||||
|
type=argparse.FileType('r'),
|
||||||
|
default=sys.stdin,
|
||||||
|
help='input file in Hjson type')
|
||||||
|
parser.add_argument('-d',
|
||||||
|
action='store_true',
|
||||||
|
help='Output register documentation (html)')
|
||||||
|
parser.add_argument('--cdefines',
|
||||||
|
'-D',
|
||||||
|
action='store_true',
|
||||||
|
help='Output C defines header')
|
||||||
|
parser.add_argument('--doc',
|
||||||
|
action='store_true',
|
||||||
|
help='Output source file documentation (gfm)')
|
||||||
|
parser.add_argument('-j',
|
||||||
|
action='store_true',
|
||||||
|
help='Output as formatted JSON')
|
||||||
|
parser.add_argument('-c', action='store_true', help='Output as JSON')
|
||||||
|
parser.add_argument('-r',
|
||||||
|
action='store_true',
|
||||||
|
help='Output as SystemVerilog RTL')
|
||||||
|
parser.add_argument('-s',
|
||||||
|
action='store_true',
|
||||||
|
help='Output as UVM Register class')
|
||||||
|
parser.add_argument('-f',
|
||||||
|
action='store_true',
|
||||||
|
help='Output as FPV CSR rw assertion module')
|
||||||
|
parser.add_argument('--outdir',
|
||||||
|
'-t',
|
||||||
|
help='Target directory for generated RTL; '
|
||||||
|
'tool uses ../rtl if blank.')
|
||||||
|
parser.add_argument('--dv-base-prefix',
|
||||||
|
default='dv_base',
|
||||||
|
help='Prefix for the DV register classes from which '
|
||||||
|
'the register models are derived.')
|
||||||
|
parser.add_argument('--outfile',
|
||||||
|
'-o',
|
||||||
|
type=argparse.FileType('w'),
|
||||||
|
default=sys.stdout,
|
||||||
|
help='Target filename for json, html, gfm.')
|
||||||
|
parser.add_argument('--verbose',
|
||||||
|
'-v',
|
||||||
|
action='store_true',
|
||||||
|
help='Verbose and run validate twice')
|
||||||
|
parser.add_argument('--param',
|
||||||
|
'-p',
|
||||||
|
type=str,
|
||||||
|
default="",
|
||||||
|
help='''Change the Parameter values.
|
||||||
|
Only integer value is supported.
|
||||||
|
You can add multiple param arguments.
|
||||||
|
|
||||||
|
Format: ParamA=ValA;ParamB=ValB
|
||||||
|
''')
|
||||||
|
parser.add_argument('--version',
|
||||||
|
'-V',
|
||||||
|
action='store_true',
|
||||||
|
help='Show version')
|
||||||
|
parser.add_argument('--novalidate',
|
||||||
|
action='store_true',
|
||||||
|
help='Skip validate, just output json')
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
if args.version:
|
||||||
|
version.show_and_exit(__file__, ["Hjson", "Mako"])
|
||||||
|
|
||||||
|
verbose = args.verbose
|
||||||
|
if (verbose):
|
||||||
|
log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
|
||||||
|
else:
|
||||||
|
log.basicConfig(format="%(levelname)s: %(message)s")
|
||||||
|
|
||||||
|
# Entries are triples of the form (arg, (format, dirspec)).
|
||||||
|
#
|
||||||
|
# arg is the name of the argument that selects the format. format is the
|
||||||
|
# name of the format. dirspec is None if the output is a single file; if
|
||||||
|
# the output needs a directory, it is a default path relative to the source
|
||||||
|
# file (used when --outdir is not given).
|
||||||
|
arg_to_format = [('j', ('json', None)), ('c', ('compact', None)),
|
||||||
|
('d', ('html', None)), ('doc', ('doc', None)),
|
||||||
|
('r', ('rtl', 'rtl')), ('s', ('dv', 'dv')),
|
||||||
|
('f', ('fpv', 'fpv/vip')), ('cdefines', ('cdh', None))]
|
||||||
|
format = None
|
||||||
|
dirspec = None
|
||||||
|
for arg_name, spec in arg_to_format:
|
||||||
|
if getattr(args, arg_name):
|
||||||
|
if format is not None:
|
||||||
|
log.error('Multiple output formats specified on '
|
||||||
|
'command line ({} and {}).'.format(format, spec[0]))
|
||||||
|
sys.exit(1)
|
||||||
|
format, dirspec = spec
|
||||||
|
if format is None:
|
||||||
|
format = 'hjson'
|
||||||
|
|
||||||
|
infile = args.input
|
||||||
|
|
||||||
|
# Split parameters into key=value pairs.
|
||||||
|
raw_params = args.param.split(';') if args.param else []
|
||||||
|
params = []
|
||||||
|
for idx, raw_param in enumerate(raw_params):
|
||||||
|
tokens = raw_param.split('=')
|
||||||
|
if len(tokens) != 2:
|
||||||
|
raise ValueError('Entry {} in list of parameter defaults to '
|
||||||
|
'apply is {!r}, which is not of the form '
|
||||||
|
'param=value.'
|
||||||
|
.format(idx, raw_param))
|
||||||
|
params.append((tokens[0], tokens[1]))
|
||||||
|
|
||||||
|
# Define either outfile or outdir (but not both), depending on the output
|
||||||
|
# format.
|
||||||
|
outfile = None
|
||||||
|
outdir = None
|
||||||
|
if dirspec is None:
|
||||||
|
if args.outdir is not None:
|
||||||
|
log.error('The {} format expects an output file, '
|
||||||
|
'not an output directory.'.format(format))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
outfile = args.outfile
|
||||||
|
else:
|
||||||
|
if args.outfile is not sys.stdout:
|
||||||
|
log.error('The {} format expects an output directory, '
|
||||||
|
'not an output file.'.format(format))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if args.outdir is not None:
|
||||||
|
outdir = args.outdir
|
||||||
|
elif infile is not sys.stdin:
|
||||||
|
outdir = str(PurePath(infile.name).parents[1].joinpath(dirspec))
|
||||||
|
else:
|
||||||
|
# We're using sys.stdin, so can't infer an output directory name
|
||||||
|
log.error(
|
||||||
|
'The {} format writes to an output directory, which '
|
||||||
|
'cannot be inferred automatically if the input comes '
|
||||||
|
'from stdin. Use --outdir to specify it manually.'.format(
|
||||||
|
format))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if format == 'doc':
|
||||||
|
with outfile:
|
||||||
|
gen_selfdoc.document(outfile)
|
||||||
|
exit(0)
|
||||||
|
|
||||||
|
srcfull = infile.read()
|
||||||
|
|
||||||
|
try:
|
||||||
|
obj = IpBlock.from_text(srcfull, params, infile.name)
|
||||||
|
except ValueError as err:
|
||||||
|
log.error(str(err))
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
if args.novalidate:
|
||||||
|
with outfile:
|
||||||
|
gen_json.gen_json(obj, outfile, format)
|
||||||
|
outfile.write('\n')
|
||||||
|
else:
|
||||||
|
if format == 'rtl':
|
||||||
|
return gen_rtl.gen_rtl(obj, outdir)
|
||||||
|
if format == 'dv':
|
||||||
|
return gen_dv.gen_dv(obj, args.dv_base_prefix, outdir)
|
||||||
|
if format == 'fpv':
|
||||||
|
return gen_fpv.gen_fpv(obj, outdir)
|
||||||
|
src_lic = None
|
||||||
|
src_copy = ''
|
||||||
|
found_spdx = None
|
||||||
|
found_lunder = None
|
||||||
|
copy = re.compile(r'.*(copyright.*)|(.*\(c\).*)', re.IGNORECASE)
|
||||||
|
spdx = re.compile(r'.*(SPDX-License-Identifier:.+)')
|
||||||
|
lunder = re.compile(r'.*(Licensed under.+)', re.IGNORECASE)
|
||||||
|
for line in srcfull.splitlines():
|
||||||
|
mat = copy.match(line)
|
||||||
|
if mat is not None:
|
||||||
|
src_copy += mat.group(1)
|
||||||
|
mat = spdx.match(line)
|
||||||
|
if mat is not None:
|
||||||
|
found_spdx = mat.group(1)
|
||||||
|
mat = lunder.match(line)
|
||||||
|
if mat is not None:
|
||||||
|
found_lunder = mat.group(1)
|
||||||
|
if found_lunder:
|
||||||
|
src_lic = found_lunder
|
||||||
|
if found_spdx:
|
||||||
|
src_lic += '\n' + found_spdx
|
||||||
|
|
||||||
|
with outfile:
|
||||||
|
if format == 'html':
|
||||||
|
return gen_html.gen_html(obj, outfile)
|
||||||
|
elif format == 'cdh':
|
||||||
|
return gen_cheader.gen_cdefines(obj, outfile, src_lic, src_copy)
|
||||||
|
else:
|
||||||
|
return gen_json.gen_json(obj, outfile, format)
|
||||||
|
|
||||||
|
outfile.write('\n')
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.exit(main())
|
|
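In `regtool.py` above, each output flag maps to a `(format, dirspec)` pair, and directory-producing formats derive a default output directory from the input file's location when `--outdir` is not given. A small sketch of that path inference (the paths are hypothetical, and a `PurePath` stands in for the opened input file):

```python
# Sketch of the default output-directory inference for directory formats.
from pathlib import PurePath

infile = PurePath('hw/ip/gpio/data/gpio.hjson')  # hypothetical input path
dirspec = 'rtl'                                  # dirspec for the '-r' format

outdir = str(infile.parents[1].joinpath(dirspec))
print(outdir)  # hw/ip/gpio/rtl
```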
@ -0,0 +1,2 @@
__pycache__/

@ -0,0 +1,8 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

from .lib import get_hjsonobj_xbars, search_ips  # noqa: F401
# noqa: F401 These functions are used in topgen.py
from .merge import amend_clocks, merge_top  # noqa: F401
from .validate import validate_top, check_flash  # noqa: F401

@ -0,0 +1,452 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
"""This contains a class which is used to help generate `top_{name}.h` and
|
||||||
|
`top_{name}.h`.
|
||||||
|
"""
|
||||||
|
from collections import OrderedDict
|
||||||
|
from typing import Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
from mako.template import Template
|
||||||
|
|
||||||
|
from .lib import get_base_and_size, Name
|
||||||
|
|
||||||
|
from reggen.ip_block import IpBlock
|
||||||
|
|
||||||
|
|
||||||
|
class MemoryRegion(object):
|
||||||
|
def __init__(self, name: Name, base_addr: int, size_bytes: int):
|
||||||
|
assert isinstance(base_addr, int)
|
||||||
|
self.name = name
|
||||||
|
self.base_addr = base_addr
|
||||||
|
self.size_bytes = size_bytes
|
||||||
|
self.size_words = (size_bytes + 3) // 4
|
||||||
|
|
||||||
|
def base_addr_name(self):
|
||||||
|
return self.name + Name(["base", "addr"])
|
||||||
|
|
||||||
|
def offset_name(self):
|
||||||
|
return self.name + Name(["offset"])
|
||||||
|
|
||||||
|
def size_bytes_name(self):
|
||||||
|
return self.name + Name(["size", "bytes"])
|
||||||
|
|
||||||
|
def size_words_name(self):
|
||||||
|
return self.name + Name(["size", "words"])
|
||||||
|
|
||||||
|
|
||||||
|
class CEnum(object):
|
||||||
|
def __init__(self, name):
|
||||||
|
self.name = name
|
||||||
|
self.enum_counter = 0
|
||||||
|
self.finalized = False
|
||||||
|
|
||||||
|
self.constants = []
|
||||||
|
|
||||||
|
def add_constant(self, constant_name, docstring=""):
|
||||||
|
assert not self.finalized
|
||||||
|
|
||||||
|
full_name = self.name + constant_name
|
||||||
|
|
||||||
|
value = self.enum_counter
|
||||||
|
self.enum_counter += 1
|
||||||
|
|
||||||
|
self.constants.append((full_name, value, docstring))
|
||||||
|
|
||||||
|
return full_name
|
||||||
|
|
||||||
|
def add_last_constant(self, docstring=""):
|
||||||
|
assert not self.finalized
|
||||||
|
|
||||||
|
full_name = self.name + Name(["last"])
|
||||||
|
|
||||||
|
_, last_val, _ = self.constants[-1]
|
||||||
|
|
||||||
|
self.constants.append((full_name, last_val, r"\internal " + docstring))
|
||||||
|
self.finalized = True
|
||||||
|
|
||||||
|
def render(self):
|
||||||
|
template = ("typedef enum ${enum.name.as_snake_case()} {\n"
|
||||||
|
"% for name, value, docstring in enum.constants:\n"
|
||||||
|
" ${name.as_c_enum()} = ${value}, /**< ${docstring} */\n"
|
||||||
|
"% endfor\n"
|
||||||
|
"} ${enum.name.as_c_type()};")
|
||||||
|
return Template(template).render(enum=self)
|
||||||
|
|
||||||
|
|
||||||
|
class CArrayMapping(object):
|
||||||
|
def __init__(self, name, output_type_name):
|
||||||
|
self.name = name
|
||||||
|
self.output_type_name = output_type_name
|
||||||
|
|
||||||
|
self.mapping = OrderedDict()
|
||||||
|
|
||||||
|
def add_entry(self, in_name, out_name):
|
||||||
|
self.mapping[in_name] = out_name
|
||||||
|
|
||||||
|
def render_declaration(self):
|
||||||
|
template = (
|
||||||
|
"extern const ${mapping.output_type_name.as_c_type()}\n"
|
||||||
|
" ${mapping.name.as_snake_case()}[${len(mapping.mapping)}];")
|
||||||
|
return Template(template).render(mapping=self)
|
||||||
|
|
||||||
|
def render_definition(self):
|
||||||
|
template = (
|
||||||
|
"const ${mapping.output_type_name.as_c_type()}\n"
|
||||||
|
" ${mapping.name.as_snake_case()}[${len(mapping.mapping)}] = {\n"
|
||||||
|
"% for in_name, out_name in mapping.mapping.items():\n"
|
||||||
|
" [${in_name.as_c_enum()}] = ${out_name.as_c_enum()},\n"
|
||||||
|
"% endfor\n"
|
||||||
|
"};\n")
|
||||||
|
return Template(template).render(mapping=self)
|
||||||
|
|
||||||
|
|
||||||
|
class TopGenC:
|
||||||
|
def __init__(self, top_info, name_to_block: Dict[str, IpBlock]):
|
||||||
|
self.top = top_info
|
||||||
|
self._top_name = Name(["top"]) + Name.from_snake_case(top_info["name"])
|
||||||
|
self._name_to_block = name_to_block
|
||||||
|
|
||||||
|
# The .c file needs the .h file's relative path, store it here
|
||||||
|
self.header_path = None
|
||||||
|
|
||||||
|
self._init_plic_targets()
|
||||||
|
self._init_plic_mapping()
|
||||||
|
self._init_alert_mapping()
|
||||||
|
self._init_pinmux_mapping()
|
||||||
|
self._init_pwrmgr_wakeups()
|
||||||
|
self._init_rstmgr_sw_rsts()
|
||||||
|
self._init_pwrmgr_reset_requests()
|
||||||
|
self._init_clkmgr_clocks()
|
||||||
|
|
||||||
|
def devices(self) -> List[Tuple[Tuple[str, Optional[str]], MemoryRegion]]:
|
||||||
|
'''Return a list of MemoryRegion objects for devices on the bus
|
||||||
|
|
||||||
|
The list returned is pairs (full_if, region) where full_if is itself a
|
||||||
|
pair (inst_name, if_name). inst_name is the name of some IP block
|
||||||
|
instantiation. if_name is the name of the interface (may be None).
|
||||||
|
region is a MemoryRegion object representing the device.
|
||||||
|
|
||||||
|
'''
|
||||||
|
ret = [] # type: List[Tuple[Tuple[str, Optional[str]], MemoryRegion]]
|
||||||
|
for inst in self.top['module']:
|
||||||
|
block = self._name_to_block[inst['type']]
|
||||||
|
for if_name, rb in block.reg_blocks.items():
|
||||||
|
full_if = (inst['name'], if_name)
|
||||||
|
full_if_name = Name.from_snake_case(full_if[0])
|
||||||
|
if if_name is not None:
|
||||||
|
full_if_name += Name.from_snake_case(if_name)
|
||||||
|
|
||||||
|
name = self._top_name + full_if_name
|
||||||
|
base, size = get_base_and_size(self._name_to_block,
|
||||||
|
inst, if_name)
|
||||||
|
|
||||||
|
region = MemoryRegion(name, base, size)
|
||||||
|
ret.append((full_if, region))
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def memories(self):
|
||||||
|
ret = []
|
||||||
|
for m in self.top["memory"]:
|
||||||
|
ret.append((m["name"],
|
||||||
|
MemoryRegion(self._top_name +
|
||||||
|
Name.from_snake_case(m["name"]),
|
||||||
|
int(m["base_addr"], 0),
|
||||||
|
int(m["size"], 0))))
|
||||||
|
|
||||||
|
for inst in self.top['module']:
|
||||||
|
if "memory" in inst:
|
||||||
|
for if_name, val in inst["memory"].items():
|
||||||
|
base, size = get_base_and_size(self._name_to_block,
|
||||||
|
inst, if_name)
|
||||||
|
|
||||||
|
name = self._top_name + Name.from_snake_case(val["label"])
|
||||||
|
region = MemoryRegion(name, base, size)
|
||||||
|
ret.append((val["label"], region))
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def _init_plic_targets(self):
|
||||||
|
enum = CEnum(self._top_name + Name(["plic", "target"]))
|
||||||
|
|
||||||
|
for core_id in range(int(self.top["num_cores"])):
|
||||||
|
enum.add_constant(Name(["ibex", str(core_id)]),
|
||||||
|
docstring="Ibex Core {}".format(core_id))
|
||||||
|
|
||||||
|
enum.add_last_constant("Final PLIC target")
|
||||||
|
|
||||||
|
self.plic_targets = enum
|
||||||
|
|
||||||
|
def _init_plic_mapping(self):
|
||||||
|
"""We eventually want to generate a mapping from interrupt id to the
|
||||||
|
source peripheral.
|
||||||
|
|
||||||
|
In order to do so, we generate two enums (one for interrupts, one for
|
||||||
|
sources), and store the generated names in a dictionary that represents
|
||||||
|
the mapping.
|
||||||
|
|
||||||
|
PLIC Interrupt ID 0 corresponds to no interrupt, and so no peripheral,
|
||||||
|
so we encode that in the enum as "unknown".
|
||||||
|
|
||||||
|
The interrupts have to be added in order, with "none" first, to ensure
|
||||||
|
that they get the correct mapping to their PLIC id, which is used for
|
||||||
|
addressing the right registers and bits.
|
||||||
|
"""
|
||||||
|
sources = CEnum(self._top_name + Name(["plic", "peripheral"]))
|
||||||
|
interrupts = CEnum(self._top_name + Name(["plic", "irq", "id"]))
|
||||||
|
plic_mapping = CArrayMapping(
|
||||||
|
self._top_name + Name(["plic", "interrupt", "for", "peripheral"]),
|
||||||
|
sources.name)
|
||||||
|
|
||||||
|
unknown_source = sources.add_constant(Name(["unknown"]),
|
||||||
|
docstring="Unknown Peripheral")
|
||||||
|
none_irq_id = interrupts.add_constant(Name(["none"]),
|
||||||
|
docstring="No Interrupt")
|
||||||
|
plic_mapping.add_entry(none_irq_id, unknown_source)
|
||||||
|
|
||||||
|
# When we generate the `interrupts` enum, the only info we have about
|
||||||
|
# the source is the module name. We'll use `source_name_map` to map a
|
||||||
|
# short module name to the full name object used for the enum constant.
|
||||||
|
source_name_map = {}
|
||||||
|
|
||||||
|
for name in self.top["interrupt_module"]:
|
||||||
|
source_name = sources.add_constant(Name.from_snake_case(name),
|
||||||
|
docstring=name)
|
||||||
|
source_name_map[name] = source_name
|
||||||
|
|
||||||
|
sources.add_last_constant("Final PLIC peripheral")
|
||||||
|
|
||||||
|
for intr in self.top["interrupt"]:
|
||||||
|
# Some interrupts are multiple bits wide. Here we deal with that by
|
||||||
|
# adding a bit-index suffix
|
||||||
|
if "width" in intr and int(intr["width"]) != 1:
|
||||||
|
for i in range(int(intr["width"])):
|
||||||
|
name = Name.from_snake_case(intr["name"]) + Name([str(i)])
|
||||||
|
irq_id = interrupts.add_constant(name,
|
||||||
|
docstring="{} {}".format(
|
||||||
|
intr["name"], i))
|
||||||
|
source_name = source_name_map[intr["module_name"]]
|
||||||
|
plic_mapping.add_entry(irq_id, source_name)
|
||||||
|
else:
|
||||||
|
name = Name.from_snake_case(intr["name"])
|
||||||
|
irq_id = interrupts.add_constant(name, docstring=intr["name"])
|
||||||
|
source_name = source_name_map[intr["module_name"]]
|
||||||
|
plic_mapping.add_entry(irq_id, source_name)
|
||||||
|
|
||||||
|
interrupts.add_last_constant("The Last Valid Interrupt ID.")
|
||||||
|
|
||||||
|
self.plic_sources = sources
|
||||||
|
self.plic_interrupts = interrupts
|
||||||
|
self.plic_mapping = plic_mapping
|
||||||
|
|
||||||
|
def _init_alert_mapping(self):
|
||||||
|
"""We eventually want to generate a mapping from alert id to the source
|
||||||
|
peripheral.
|
||||||
|
|
||||||
|
In order to do so, we generate two enums (one for alerts, one for
|
||||||
|
sources), and store the generated names in a dictionary that represents
|
||||||
|
the mapping.
|
||||||
|
|
||||||
|
Alert Handler has no concept of "no alert", unlike the PLIC.
|
||||||
|
|
||||||
|
The alerts have to be added in order, to ensure that they get the
|
||||||
|
correct mapping to their alert id, which is used for addressing the
|
||||||
|
right registers and bits.
|
||||||
|
"""
|
||||||
|
sources = CEnum(self._top_name + Name(["alert", "peripheral"]))
|
||||||
|
alerts = CEnum(self._top_name + Name(["alert", "id"]))
|
||||||
|
alert_mapping = CArrayMapping(
|
||||||
|
self._top_name + Name(["alert", "for", "peripheral"]),
|
||||||
|
sources.name)
|
||||||
|
|
||||||
|
# When we generate the `alerts` enum, the only info we have about the
|
||||||
|
# source is the module name. We'll use `source_name_map` to map a short
|
||||||
|
# module name to the full name object used for the enum constant.
|
||||||
|
source_name_map = {}
|
||||||
|
|
||||||
|
for name in self.top["alert_module"]:
|
||||||
|
source_name = sources.add_constant(Name.from_snake_case(name),
|
||||||
|
docstring=name)
|
||||||
|
source_name_map[name] = source_name
|
||||||
|
|
||||||
|
sources.add_last_constant("Final Alert peripheral")
|
||||||
|
|
||||||
|
for alert in self.top["alert"]:
|
||||||
|
if "width" in alert and int(alert["width"]) != 1:
|
||||||
|
for i in range(int(alert["width"])):
|
||||||
|
name = Name.from_snake_case(alert["name"]) + Name([str(i)])
|
||||||
|
irq_id = alerts.add_constant(name,
|
||||||
|
docstring="{} {}".format(
|
||||||
|
alert["name"], i))
|
||||||
|
source_name = source_name_map[alert["module_name"]]
|
||||||
|
alert_mapping.add_entry(irq_id, source_name)
|
||||||
|
else:
|
||||||
|
name = Name.from_snake_case(alert["name"])
|
||||||
|
alert_id = alerts.add_constant(name, docstring=alert["name"])
|
||||||
|
source_name = source_name_map[alert["module_name"]]
|
||||||
|
alert_mapping.add_entry(alert_id, source_name)
|
||||||
|
|
||||||
|
alerts.add_last_constant("The Last Valid Alert ID.")
|
||||||
|
|
||||||
|
self.alert_sources = sources
|
||||||
|
self.alert_alerts = alerts
|
||||||
|
self.alert_mapping = alert_mapping
|
||||||
|
|
||||||
|
def _init_pinmux_mapping(self):
|
||||||
|
"""Generate C enums for addressing pinmux registers and in/out selects.
|
||||||
|
|
||||||
|
Inputs/outputs are connected in the order the modules are listed in
|
||||||
|
the hjson under the "mio_modules" key. For each module, the corresponding
|
||||||
|
inouts are connected first, followed by either the inputs or the outputs.
|
||||||
|
|
||||||
|
Inputs:
|
||||||
|
- Peripheral chooses register field (pinmux_peripheral_in)
|
||||||
|
- Insel chooses MIO input (pinmux_insel)
|
||||||
|
|
||||||
|
Outputs:
|
||||||
|
- MIO chooses register field (pinmux_mio_out)
|
||||||
|
- Outsel chooses peripheral output (pinmux_outsel)
|
||||||
|
|
||||||
|
Insel and outsel have some special values which are captured here too.
|
||||||
|
"""
|
||||||
|
pinmux_info = self.top['pinmux']
|
||||||
|
pinout_info = self.top['pinout']
|
||||||
|
|
||||||
|
# Peripheral Inputs
|
||||||
|
peripheral_in = CEnum(self._top_name +
|
||||||
|
Name(['pinmux', 'peripheral', 'in']))
|
||||||
|
i = 0
|
||||||
|
for sig in pinmux_info['ios']:
|
||||||
|
if sig['connection'] == 'muxed' and sig['type'] in ['inout', 'input']:
|
||||||
|
index = Name([str(sig['idx'])]) if sig['idx'] != -1 else Name([])
|
||||||
|
name = Name.from_snake_case(sig['name']) + index
|
||||||
|
peripheral_in.add_constant(name, docstring='Peripheral Input {}'.format(i))
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
peripheral_in.add_last_constant('Last valid peripheral input')
|
||||||
|
|
||||||
|
# Pinmux Input Selects
|
||||||
|
insel = CEnum(self._top_name + Name(['pinmux', 'insel']))
|
||||||
|
insel.add_constant(Name(['constant', 'zero']),
|
||||||
|
docstring='Tie constantly to zero')
|
||||||
|
insel.add_constant(Name(['constant', 'one']),
|
||||||
|
docstring='Tie constantly to one')
|
||||||
|
i = 0
|
||||||
|
for pad in pinout_info['pads']:
|
||||||
|
if pad['connection'] == 'muxed':
|
||||||
|
insel.add_constant(Name([pad['name']]),
|
||||||
|
docstring='MIO Pad {}'.format(i))
|
||||||
|
i += 1
|
||||||
|
insel.add_last_constant('Last valid insel value')
|
||||||
|
|
||||||
|
# MIO Outputs
|
||||||
|
mio_out = CEnum(self._top_name + Name(['pinmux', 'mio', 'out']))
|
||||||
|
i = 0
|
||||||
|
for pad in pinout_info['pads']:
|
||||||
|
if pad['connection'] == 'muxed':
|
||||||
|
mio_out.add_constant(Name.from_snake_case(pad['name']),
|
||||||
|
docstring='MIO Pad {}'.format(i))
|
||||||
|
i += 1
|
||||||
|
mio_out.add_last_constant('Last valid mio output')
|
||||||
|
|
||||||
|
# Pinmux Output Selects
|
||||||
|
outsel = CEnum(self._top_name + Name(['pinmux', 'outsel']))
|
||||||
|
outsel.add_constant(Name(['constant', 'zero']),
|
||||||
|
docstring='Tie constantly to zero')
|
||||||
|
outsel.add_constant(Name(['constant', 'one']),
|
||||||
|
docstring='Tie constantly to one')
|
||||||
|
outsel.add_constant(Name(['constant', 'high', 'z']),
|
||||||
|
docstring='Tie constantly to high-Z')
|
||||||
|
i = 0
|
||||||
|
for sig in pinmux_info['ios']:
|
||||||
|
if sig['connection'] == 'muxed' and sig['type'] in ['inout', 'output']:
|
||||||
|
index = Name([str(sig['idx'])]) if sig['idx'] != -1 else Name([])
|
||||||
|
name = Name.from_snake_case(sig['name']) + index
|
||||||
|
outsel.add_constant(name, docstring='Peripheral Output {}'.format(i))
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
outsel.add_last_constant('Last valid outsel value')
|
||||||
|
|
||||||
|
self.pinmux_peripheral_in = peripheral_in
|
||||||
|
self.pinmux_insel = insel
|
||||||
|
self.pinmux_mio_out = mio_out
|
||||||
|
self.pinmux_outsel = outsel
|
||||||
|
|
||||||
|
def _init_pwrmgr_wakeups(self):
|
||||||
|
enum = CEnum(self._top_name +
|
||||||
|
Name(["power", "manager", "wake", "ups"]))
|
||||||
|
|
||||||
|
for signal in self.top["wakeups"]:
|
||||||
|
enum.add_constant(
|
||||||
|
Name.from_snake_case(signal["module"]) +
|
||||||
|
Name.from_snake_case(signal["name"]))
|
||||||
|
|
||||||
|
enum.add_last_constant("Last valid pwrmgr wakeup signal")
|
||||||
|
|
||||||
|
self.pwrmgr_wakeups = enum
|
||||||
|
|
||||||
|
# Enumerates the positions of all software controllable resets
|
||||||
|
def _init_rstmgr_sw_rsts(self):
|
||||||
|
sw_rsts = self.top['resets'].get_sw_resets()
|
||||||
|
|
||||||
|
enum = CEnum(self._top_name +
|
||||||
|
Name(["reset", "manager", "sw", "resets"]))
|
||||||
|
|
||||||
|
for rst in sw_rsts:
|
||||||
|
enum.add_constant(Name.from_snake_case(rst))
|
||||||
|
|
||||||
|
enum.add_last_constant("Last valid rstmgr software reset request")
|
||||||
|
|
||||||
|
self.rstmgr_sw_rsts = enum
|
||||||
|
|
||||||
|
def _init_pwrmgr_reset_requests(self):
|
||||||
|
enum = CEnum(self._top_name +
|
||||||
|
Name(["power", "manager", "reset", "requests"]))
|
||||||
|
|
||||||
|
for signal in self.top["reset_requests"]:
|
||||||
|
enum.add_constant(
|
||||||
|
Name.from_snake_case(signal["module"]) +
|
||||||
|
Name.from_snake_case(signal["name"]))
|
||||||
|
|
||||||
|
enum.add_last_constant("Last valid pwrmgr reset_request signal")
|
||||||
|
|
||||||
|
self.pwrmgr_reset_requests = enum
|
||||||
|
|
||||||
|
def _init_clkmgr_clocks(self):
|
||||||
|
"""
|
||||||
|
Creates CEnums for accessing the software-controlled clocks in the
|
||||||
|
design.
|
||||||
|
|
||||||
|
The logic here matches the logic in topgen.py in how it instantiates the
|
||||||
|
clock manager with the described clocks.
|
||||||
|
|
||||||
|
We differentiate "gateable" clocks and "hintable" clocks because the
|
||||||
|
clock manager has separate register interfaces for each group.
|
||||||
|
"""
|
||||||
|
clocks = self.top['clocks']
|
||||||
|
|
||||||
|
aon_clocks = set(src.name
|
||||||
|
for src in clocks.all_srcs.values() if src.aon)
|
||||||
|
|
||||||
|
gateable_clocks = CEnum(self._top_name + Name(["gateable", "clocks"]))
|
||||||
|
hintable_clocks = CEnum(self._top_name + Name(["hintable", "clocks"]))
|
||||||
|
|
||||||
|
# This replicates the behaviour in `topgen.py` in deriving `hints` and
|
||||||
|
# `sw_clocks`.
|
||||||
|
for group in clocks.groups.values():
|
||||||
|
for name, source in group.clocks.items():
|
||||||
|
if source.name not in aon_clocks:
|
||||||
|
# All these clocks start with `clk_` which is redundant.
|
||||||
|
clock_name = Name.from_snake_case(name).remove_part("clk")
|
||||||
|
docstring = "Clock {} in group {}".format(name, group.name)
|
||||||
|
if group.sw_cg == "yes":
|
||||||
|
gateable_clocks.add_constant(clock_name, docstring)
|
||||||
|
elif group.sw_cg == "hint":
|
||||||
|
hintable_clocks.add_constant(clock_name, docstring)
|
||||||
|
|
||||||
|
gateable_clocks.add_last_constant("Last Valid Gateable Clock")
|
||||||
|
hintable_clocks.add_last_constant("Last Valid Hintable Clock")
|
||||||
|
|
||||||
|
self.clkmgr_gateable_clocks = gateable_clocks
|
||||||
|
self.clkmgr_hintable_clocks = hintable_clocks
|
|
@ -0,0 +1,169 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
|
||||||
|
def _yn_to_bool(yn: object) -> bool:
|
||||||
|
yn_str = str(yn)
|
||||||
|
if yn_str.lower() == 'yes':
|
||||||
|
return True
|
||||||
|
if yn_str.lower() == 'no':
|
||||||
|
return False
|
||||||
|
raise ValueError('Unknown yes/no value: {!r}.'.format(yn))
|
||||||
|
|
||||||
|
|
||||||
|
def _bool_to_yn(val: bool) -> str:
|
||||||
|
return 'yes' if val else 'no'
|
||||||
|
|
||||||
|
|
||||||
|
def _to_int(val: object) -> int:
|
||||||
|
if isinstance(val, int):
|
||||||
|
return val
|
||||||
|
return int(str(val))
|
||||||
|
|
||||||
|
|
||||||
|
def _check_choices(val: str, what: str, choices: List[str]) -> str:
|
||||||
|
if val in choices:
|
||||||
|
return val
|
||||||
|
raise ValueError('{} is {!r}, which is not one of the expected values: {}.'
|
||||||
|
.format(what, val, choices))
|
||||||
|
|
||||||
|
|
||||||
|
class SourceClock:
|
||||||
|
'''A clock source (input to the top-level)'''
|
||||||
|
def __init__(self, raw: Dict[str, object]):
|
||||||
|
self.name = str(raw['name'])
|
||||||
|
self.aon = _yn_to_bool(raw['aon'])
|
||||||
|
self.freq = _to_int(raw['freq'])
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
return {
|
||||||
|
'name': self.name,
|
||||||
|
'aon': _bool_to_yn(self.aon),
|
||||||
|
'freq': str(self.freq)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class DerivedSourceClock(SourceClock):
|
||||||
|
'''A derived source clock (divided down from some other clock)'''
|
||||||
|
def __init__(self,
|
||||||
|
raw: Dict[str, object],
|
||||||
|
sources: Dict[str, SourceClock]):
|
||||||
|
super().__init__(raw)
|
||||||
|
self.div = _to_int(raw['div'])
|
||||||
|
self.src = sources[str(raw['src'])]
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
ret = super()._asdict()
|
||||||
|
ret['div'] = str(self.div)
|
||||||
|
ret['src'] = self.src.name
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
class Group:
|
||||||
|
def __init__(self,
|
||||||
|
raw: Dict[str, object],
|
||||||
|
sources: Dict[str, SourceClock],
|
||||||
|
what: str):
|
||||||
|
self.name = str(raw['name'])
|
||||||
|
self.src = str(raw['src'])
|
||||||
|
self.sw_cg = _check_choices(str(raw['sw_cg']), 'sw_cg for ' + what,
|
||||||
|
['yes', 'no', 'hint'])
|
||||||
|
if self.src == 'yes' and self.sw_cg != 'no':
|
||||||
|
raise ValueError(f'Clock group {self.name} has an invalid '
|
||||||
|
f'combination of src and sw_cg: {self.src} and '
|
||||||
|
f'{self.sw_cg}, respectively.')
|
||||||
|
|
||||||
|
self.unique = _yn_to_bool(raw.get('unique', 'no'))
|
||||||
|
if self.sw_cg == 'no' and self.unique:
|
||||||
|
raise ValueError(f'Clock group {self.name} has an invalid '
|
||||||
|
f'combination with sw_cg of {self.sw_cg} and '
|
||||||
|
f'unique set.')
|
||||||
|
|
||||||
|
self.clocks = {} # type: Dict[str, SourceClock]
|
||||||
|
raw_clocks = raw.get('clocks', {})
|
||||||
|
if not isinstance(raw_clocks, dict):
|
||||||
|
raise ValueError(f'clocks for {what} is not a dictionary')
|
||||||
|
for clk_name, src_name in raw_clocks.items():
|
||||||
|
src = sources.get(src_name)
|
||||||
|
if src is None:
|
||||||
|
raise ValueError(f'The {clk_name} entry of clocks for {what} '
|
||||||
|
f'has source {src_name}, which is not a '
|
||||||
|
f'known clock source.')
|
||||||
|
self.clocks[clk_name] = src
|
||||||
|
|
||||||
|
def add_clock(self, clk_name: str, src: SourceClock):
|
||||||
|
# Duplicates are ok, so long as they have the same source.
|
||||||
|
existing_src = self.clocks.get(clk_name)
|
||||||
|
if existing_src is not None:
|
||||||
|
if existing_src is not src:
|
||||||
|
raise ValueError(f'Cannot add clock {clk_name} to group '
|
||||||
|
f'{self.name} with source {src.name}: the '
|
||||||
|
f'clock is there already with source '
|
||||||
|
f'{existing_src.name}.')
|
||||||
|
else:
|
||||||
|
self.clocks[clk_name] = src
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
return {
|
||||||
|
'name': self.name,
|
||||||
|
'src': self.src,
|
||||||
|
'sw_cg': self.sw_cg,
|
||||||
|
'unique': _bool_to_yn(self.unique),
|
||||||
|
'clocks': {name: src.name for name, src in self.clocks.items()}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class Clocks:
|
||||||
|
'''Clock connections for the chip'''
|
||||||
|
def __init__(self, raw: Dict[str, object]):
|
||||||
|
self.hier_paths = {}
|
||||||
|
assert isinstance(raw['hier_paths'], dict)
|
||||||
|
for grp_src, path in raw['hier_paths'].items():
|
||||||
|
self.hier_paths[str(grp_src)] = str(path)
|
||||||
|
|
||||||
|
assert isinstance(raw['srcs'], list)
|
||||||
|
self.srcs = {}
|
||||||
|
for r in raw['srcs']:
|
||||||
|
clk = SourceClock(r)
|
||||||
|
self.srcs[clk.name] = clk
|
||||||
|
|
||||||
|
self.derived_srcs = {}
|
||||||
|
for r in raw['derived_srcs']:
|
||||||
|
clk = DerivedSourceClock(r, self.srcs)
|
||||||
|
self.derived_srcs[clk.name] = clk
|
||||||
|
|
||||||
|
self.all_srcs = self.srcs.copy()
|
||||||
|
self.all_srcs.update(self.derived_srcs)
|
||||||
|
|
||||||
|
self.groups = {}
|
||||||
|
assert isinstance(raw['groups'], list)
|
||||||
|
for idx, raw_grp in enumerate(raw['groups']):
|
||||||
|
assert isinstance(raw_grp, dict)
|
||||||
|
grp = Group(raw_grp, self.srcs, f'clocks.groups[{idx}]')
|
||||||
|
self.groups[grp.name] = grp
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
return {
|
||||||
|
'hier_paths': self.hier_paths,
|
||||||
|
'srcs': list(self.srcs.values()),
|
||||||
|
'derived_srcs': list(self.derived_srcs.values()),
|
||||||
|
'groups': list(self.groups.values())
|
||||||
|
}
|
||||||
|
|
||||||
|
def add_clock_to_group(self, grp: Group, clk_name: str, src_name: str):
|
||||||
|
src = self.all_srcs.get(src_name)
|
||||||
|
if src is None:
|
||||||
|
raise ValueError(f'Cannot add clock {clk_name} to group '
|
||||||
|
f'{grp.name}: the given source name is '
|
||||||
|
f'{src_name}, which is unknown.')
|
||||||
|
grp.add_clock(clk_name, src)
|
||||||
|
|
||||||
|
def get_clock_by_name(self, name: str) -> object:
|
||||||
|
|
||||||
|
ret = self.all_srcs.get(name)
|
||||||
|
if ret is None:
|
||||||
|
raise ValueError(f'{name} is not a valid clock')
|
||||||
|
return ret
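
For reference, a minimal sketch of how this `Clocks` class can be driven. The `raw` dictionary below is a hypothetical, hand-written stand-in for the `clocks` section of a top-level Hjson file; real field values come from the validated top configuration.

```python
# Hypothetical input; mirrors the keys the classes above expect.
raw = {
    'hier_paths': {'top': 'clkmgr_aon_clocks.'},
    'srcs': [
        {'name': 'main', 'aon': 'no', 'freq': '100000000'},
        {'name': 'aon', 'aon': 'yes', 'freq': '200000'},
    ],
    'derived_srcs': [
        {'name': 'io_div4', 'aon': 'no', 'freq': '24000000',
         'src': 'main', 'div': '4'},
    ],
    'groups': [
        {'name': 'trans', 'src': 'main', 'sw_cg': 'hint',
         'clocks': {'clk_main_aes': 'main'}},
    ],
}

clocks = Clocks(raw)
print(clocks.get_clock_by_name('io_div4').freq)             # 24000000
print(clocks.groups['trans'].clocks['clk_main_aes'].name)   # main
```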
|
|
@ -0,0 +1,46 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
import logging as log
|
||||||
|
from typing import Optional, Tuple
|
||||||
|
|
||||||
|
from mako import exceptions # type: ignore
|
||||||
|
from mako.lookup import TemplateLookup # type: ignore
|
||||||
|
from pkg_resources import resource_filename
|
||||||
|
|
||||||
|
from reggen.gen_dv import gen_core_file
|
||||||
|
|
||||||
|
from .top import Top
|
||||||
|
|
||||||
|
|
||||||
|
def sv_base_addr(top: Top, if_name: Tuple[str, Optional[str]]) -> str:
|
||||||
|
'''Get the base address of a device interface in SV syntax'''
|
||||||
|
return "{}'h{:x}".format(top.regwidth, top.if_addrs[if_name])
|
||||||
|
|
||||||
|
|
||||||
|
def gen_dv(top: Top,
|
||||||
|
dv_base_prefix: str,
|
||||||
|
outdir: str) -> int:
|
||||||
|
'''Generate DV RAL model for a Top'''
|
||||||
|
# Read template
|
||||||
|
lookup = TemplateLookup(directories=[resource_filename('topgen', '.'),
|
||||||
|
resource_filename('reggen', '.')])
|
||||||
|
uvm_reg_tpl = lookup.get_template('top_uvm_reg.sv.tpl')
|
||||||
|
|
||||||
|
# Expand template
|
||||||
|
try:
|
||||||
|
to_write = uvm_reg_tpl.render(top=top,
|
||||||
|
dv_base_prefix=dv_base_prefix)
|
||||||
|
except: # noqa: E722
|
||||||
|
log.error(exceptions.text_error_template().render())
|
||||||
|
return 1
|
||||||
|
|
||||||
|
# Dump to output file
|
||||||
|
dest_path = '{}/chip_ral_pkg.sv'.format(outdir)
|
||||||
|
with open(dest_path, 'w') as fout:
|
||||||
|
fout.write(to_write)
|
||||||
|
|
||||||
|
gen_core_file(outdir, 'chip', dv_base_prefix, ['chip_ral_pkg.sv'])
|
||||||
|
|
||||||
|
return 0
|
File diff suppressed because it is too large
|
@ -0,0 +1,490 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
import logging as log
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections import OrderedDict
|
||||||
|
from copy import deepcopy
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
import hjson
|
||||||
|
from reggen.ip_block import IpBlock
|
||||||
|
|
||||||
|
# Ignore flake8 warning as the function is used in the template
|
||||||
|
# disable isort formatting, as it conflicts with flake8
|
||||||
|
from .intermodule import find_otherside_modules # noqa : F401 # isort:skip
|
||||||
|
from .intermodule import im_portname, im_defname, im_netname # noqa : F401 # isort:skip
|
||||||
|
from .intermodule import get_dangling_im_def # noqa : F401 # isort:skip
|
||||||
|
|
||||||
|
|
||||||
|
class Name:
|
||||||
|
"""
|
||||||
|
We often need to format names in specific ways; this class does so.
|
||||||
|
|
||||||
|
To simplify parsing and reassembling of name strings, this class
|
||||||
|
stores the name parts as a canonical list of strings internally
|
||||||
|
(in self.parts).
|
||||||
|
|
||||||
|
The "from_*" functions parse and split a name string into the canonical
|
||||||
|
list, whereas the "as_*" functions reassemble the canonical list in the
|
||||||
|
format specified.
|
||||||
|
|
||||||
|
For example, ex = Name.from_snake_case("example_name") gets split into
|
||||||
|
["example", "name"] internally, and ex.as_camel_case() reassembles this
|
||||||
|
internal representation into "ExampleName".
|
||||||
|
"""
|
||||||
|
def __add__(self, other):
|
||||||
|
return Name(self.parts + other.parts)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_snake_case(input: str) -> 'Name':
|
||||||
|
return Name(input.split("_"))
|
||||||
|
|
||||||
|
def __init__(self, parts: List[str]):
|
||||||
|
self.parts = parts
|
||||||
|
for p in parts:
|
||||||
|
assert len(p) > 0, "cannot add zero-length name piece"
|
||||||
|
|
||||||
|
def as_snake_case(self) -> str:
|
||||||
|
return "_".join([p.lower() for p in self.parts])
|
||||||
|
|
||||||
|
def as_camel_case(self) -> str:
|
||||||
|
out = ""
|
||||||
|
for p in self.parts:
|
||||||
|
# If we're about to join two parts which would introduce adjacent
|
||||||
|
# numbers, put an underscore between them.
|
||||||
|
if out[-1:].isnumeric() and p[:1].isnumeric():
|
||||||
|
out += "_" + p
|
||||||
|
else:
|
||||||
|
out += p.capitalize()
|
||||||
|
return out
|
||||||
|
|
||||||
|
def as_c_define(self) -> str:
|
||||||
|
return "_".join([p.upper() for p in self.parts])
|
||||||
|
|
||||||
|
def as_c_enum(self) -> str:
|
||||||
|
return "k" + self.as_camel_case()
|
||||||
|
|
||||||
|
def as_c_type(self) -> str:
|
||||||
|
return self.as_snake_case() + "_t"
|
||||||
|
|
||||||
|
def remove_part(self, part_to_remove: str) -> "Name":
|
||||||
|
return Name([p for p in self.parts if p != part_to_remove])
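
As a quick illustration of the formatting helpers above (illustrative only; the input string is made up, assuming the `Name` class as defined here):

```python
ex = Name.from_snake_case("example_name")
print(ex.as_camel_case())                   # ExampleName
print(ex.as_c_define())                     # EXAMPLE_NAME
print(ex.as_c_enum())                       # kExampleName
print(ex.as_c_type())                       # example_name_t
print((ex + Name(["0"])).as_snake_case())   # example_name_0
```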
|
||||||
|
|
||||||
|
|
||||||
|
def is_ipcfg(ip: Path) -> bool: # return bool
|
||||||
|
log.info("IP Path: %s" % repr(ip))
|
||||||
|
ip_name = ip.parents[1].name
|
||||||
|
hjson_name = ip.name
|
||||||
|
|
||||||
|
log.info("IP Name(%s) and HJSON name (%s)" % (ip_name, hjson_name))
|
||||||
|
|
||||||
|
if ip_name + ".hjson" == hjson_name or ip_name + "_reg.hjson" == hjson_name:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def search_ips(ip_path): # return list of config files
|
||||||
|
# list every Hjson file
|
||||||
|
p = ip_path.glob('*/data/*.hjson')
|
||||||
|
|
||||||
|
# filter only ip_name/data/ip_name{_reg|''}.hjson
|
||||||
|
ips = [x for x in p if is_ipcfg(x)]
|
||||||
|
|
||||||
|
log.info("Filtered-in IP files: %s" % repr(ips))
|
||||||
|
return ips
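
A small sketch of the filtering rule implemented by `is_ipcfg()`; the paths are hypothetical examples of the `ip_name/data/ip_name{_reg,}.hjson` pattern it keeps:

```python
from pathlib import Path

print(is_ipcfg(Path("hw/ip/uart/data/uart.hjson")))           # True
print(is_ipcfg(Path("hw/ip/uart/data/uart_reg.hjson")))       # True
print(is_ipcfg(Path("hw/ip/uart/data/uart_testplan.hjson")))  # False
```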
|
||||||
|
|
||||||
|
|
||||||
|
def is_xbarcfg(xbar_obj):
|
||||||
|
if "type" in xbar_obj and xbar_obj["type"] == "xbar":
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def get_hjsonobj_xbars(xbar_path):
|
||||||
|
""" Search crossbars Hjson files from given path.
|
||||||
|
|
||||||
|
Search every Hjson in the directory and check Hjson type.
|
||||||
|
It could be type: "top" or type: "xbar"
|
||||||
|
returns a list of the xbar objects (one parsed Hjson dict per crossbar)
|
||||||
|
"""
|
||||||
|
p = xbar_path.glob('*.hjson')
|
||||||
|
try:
|
||||||
|
xbar_objs = [
|
||||||
|
hjson.load(x.open('r'),
|
||||||
|
use_decimal=True,
|
||||||
|
object_pairs_hook=OrderedDict) for x in p
|
||||||
|
]
|
||||||
|
except ValueError:
|
||||||
|
raise SystemExit(sys.exc_info()[1])
|
||||||
|
|
||||||
|
xbar_objs = [x for x in xbar_objs if is_xbarcfg(x)]
|
||||||
|
|
||||||
|
return xbar_objs
|
||||||
|
|
||||||
|
|
||||||
|
def get_module_by_name(top, name):
|
||||||
|
"""Search in top["module"] by name
|
||||||
|
"""
|
||||||
|
module = None
|
||||||
|
for m in top["module"]:
|
||||||
|
if m["name"] == name:
|
||||||
|
module = m
|
||||||
|
break
|
||||||
|
|
||||||
|
return module
|
||||||
|
|
||||||
|
|
||||||
|
def intersignal_to_signalname(top, m_name, s_name) -> str:
|
||||||
|
|
||||||
|
# TODO: Find the signal in the `inter_module_list` and get the correct signal name
|
||||||
|
|
||||||
|
return "{m_name}_{s_name}".format(m_name=m_name, s_name=s_name)
|
||||||
|
|
||||||
|
|
||||||
|
def get_package_name_by_intermodule_signal(top, struct) -> str:
|
||||||
|
"""Search inter-module signal package with the struct name
|
||||||
|
|
||||||
|
For instance, if `flash_ctrl` has inter-module signal package,
|
||||||
|
this function returns the package name
|
||||||
|
"""
|
||||||
|
instances = top["module"] + top["memory"]
|
||||||
|
|
||||||
|
intermodule_instances = [
|
||||||
|
x["inter_signal_list"] for x in instances if "inter_signal_list" in x
|
||||||
|
]
|
||||||
|
|
||||||
|
for m in intermodule_instances:
|
||||||
|
if m["name"] == struct and "package" in m:
|
||||||
|
return m["package"]
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def get_signal_by_name(module, name):
|
||||||
|
"""Return the signal struct with the type input/output/inout
|
||||||
|
"""
|
||||||
|
result = None
|
||||||
|
for s in module["available_input_list"] + module[
|
||||||
|
"available_output_list"] + module["available_inout_list"]:
|
||||||
|
if s["name"] == name:
|
||||||
|
result = s
|
||||||
|
break
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def add_module_prefix_to_signal(signal, module):
|
||||||
|
"""Add module prefix to module signal format { name: "sig_name", width: NN }
|
||||||
|
"""
|
||||||
|
result = deepcopy(signal)
|
||||||
|
|
||||||
|
if "name" not in signal:
|
||||||
|
raise SystemExit("signal {} doesn't have name field".format(signal))
|
||||||
|
|
||||||
|
result["name"] = module + "_" + signal["name"]
|
||||||
|
result["module_name"] = module
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_ms_name(name):
|
||||||
|
"""Split module_name.signal_name to module_name , signal_name
|
||||||
|
"""
|
||||||
|
|
||||||
|
tokens = name.split('.')
|
||||||
|
|
||||||
|
if len(tokens) == 0:
|
||||||
|
raise SystemExit("This to be catched in validate.py")
|
||||||
|
|
||||||
|
module = tokens[0]
|
||||||
|
signal = None
|
||||||
|
if len(tokens) == 2:
|
||||||
|
signal = tokens[1]
|
||||||
|
|
||||||
|
return module, signal
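
For example (illustrative values, assuming `get_ms_name` as defined above):

```python
print(get_ms_name("uart0.tx"))   # ('uart0', 'tx')
print(get_ms_name("uart0"))      # ('uart0', None)
```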
|
||||||
|
|
||||||
|
|
||||||
|
def parse_pad_field(padstr):
|
||||||
|
"""Parse PadName[NN...NN] or PadName[NN] or just PadName
|
||||||
|
"""
|
||||||
|
match = re.match(r'^([A-Za-z0-9_]+)(\[([0-9]+)(\.\.([0-9]+))?\]|)', padstr)
|
||||||
|
return match.group(1), match.group(3), match.group(5)
|
||||||
|
|
||||||
|
|
||||||
|
def get_pad_list(padstr):
|
||||||
|
pads = []
|
||||||
|
|
||||||
|
pad, first, last = parse_pad_field(padstr)
|
||||||
|
if first is None:
|
||||||
|
first = 0
|
||||||
|
last = 0
|
||||||
|
elif last is None:
|
||||||
|
last = first
|
||||||
|
first = int(first, 0)
|
||||||
|
last = int(last, 0)
|
||||||
|
# width = first - last + 1
|
||||||
|
|
||||||
|
for p in range(first, last + 1):
|
||||||
|
pads.append(OrderedDict([("name", pad), ("index", p)]))
|
||||||
|
|
||||||
|
return pads
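
A short sketch of the expansion performed by `get_pad_list()`; the pad names are hypothetical:

```python
print(get_pad_list("IOA[0..2]"))
# [OrderedDict([('name', 'IOA'), ('index', 0)]),
#  OrderedDict([('name', 'IOA'), ('index', 1)]),
#  OrderedDict([('name', 'IOA'), ('index', 2)])]
print(get_pad_list("IOB[5]"))
# [OrderedDict([('name', 'IOB'), ('index', 5)])]
```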
|
||||||
|
|
||||||
|
|
||||||
|
# Template functions
|
||||||
|
def ljust(x, width):
|
||||||
|
return "{:<{width}}".format(x, width=width)
|
||||||
|
|
||||||
|
|
||||||
|
def bitarray(d, width):
|
||||||
|
"""Print Systemverilog bit array
|
||||||
|
|
||||||
|
@param d the bit width of the signal
|
||||||
|
@param width max character width of the signal group
|
||||||
|
|
||||||
|
For instance, if width is 4, the max d value in the signal group could be
|
||||||
|
9999. If d is 2, then this function pads 3 spaces at the end of the bit
|
||||||
|
slice.
|
||||||
|
|
||||||
|
"[1:0] " <- d:=2, width=4
|
||||||
|
"[9999:0]" <- max d-1 value
|
||||||
|
|
||||||
|
If d is 1, an array slice isn't necessary, so this returns empty spaces
|
||||||
|
"""
|
||||||
|
|
||||||
|
if d <= 0:
|
||||||
|
log.error("lib.bitarray: Given value {} is smaller than 1".format(d))
|
||||||
|
raise ValueError
|
||||||
|
if d == 1:
|
||||||
|
return " " * (width + 4) # [x:0] needs 4 more space than char_width
|
||||||
|
|
||||||
|
out = "[{}:0]".format(d - 1)
|
||||||
|
return out + (" " * (width - len(str(d))))
|
||||||
|
|
||||||
|
|
||||||
|
def parameterize(text):
|
||||||
|
"""Return the value wrapping with quote if not integer nor bits
|
||||||
|
"""
|
||||||
|
if re.match(r'(\d+\'[hdb]\s*[0-9a-f_A-F]+|[0-9]+)', text) is None:
|
||||||
|
return "\"{}\"".format(text)
|
||||||
|
|
||||||
|
return text
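
Illustrative calls (values invented, assuming `parameterize` as defined above):

```python
print(parameterize("16'h7fff"))         # 16'h7fff          (bit literal, unchanged)
print(parameterize("128"))              # 128               (integer, unchanged)
print(parameterize("BootRomInitFile"))  # "BootRomInitFile" (quoted)
```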
|
||||||
|
|
||||||
|
|
||||||
|
def index(i: int) -> str:
|
||||||
|
"""Return index if it is not -1
|
||||||
|
"""
|
||||||
|
return "[{}]".format(i) if i != -1 else ""
|
||||||
|
|
||||||
|
|
||||||
|
def get_clk_name(clk):
|
||||||
|
"""Return the appropriate clk name
|
||||||
|
"""
|
||||||
|
if clk == 'main':
|
||||||
|
return 'clk_i'
|
||||||
|
else:
|
||||||
|
return "clk_{}_i".format(clk)
|
||||||
|
|
||||||
|
|
||||||
|
def get_reset_path(reset, domain, top):
|
||||||
|
"""Return the appropriate reset path given name
|
||||||
|
"""
|
||||||
|
return top['resets'].get_path(reset, domain)
|
||||||
|
|
||||||
|
|
||||||
|
def get_unused_resets(top):
|
||||||
|
"""Return dict of unused resets and associated domain
|
||||||
|
"""
|
||||||
|
return top['resets'].get_unused_resets(top['power']['domains'])
|
||||||
|
|
||||||
|
|
||||||
|
def is_templated(module):
|
||||||
|
"""Returns an indication where a particular module is templated
|
||||||
|
"""
|
||||||
|
if "attr" not in module:
|
||||||
|
return False
|
||||||
|
elif module["attr"] in ["templated"]:
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def is_top_reggen(module):
|
||||||
|
"""Returns an indication where a particular module is NOT templated
|
||||||
|
and requires top level specific reggen
|
||||||
|
"""
|
||||||
|
if "attr" not in module:
|
||||||
|
return False
|
||||||
|
elif module["attr"] in ["reggen_top", "reggen_only"]:
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def is_inst(module):
|
||||||
|
"""Returns an indication where a particular module should be instantiated
|
||||||
|
in the top level
|
||||||
|
"""
|
||||||
|
top_level_module = False
|
||||||
|
top_level_mem = False
|
||||||
|
|
||||||
|
if "attr" not in module:
|
||||||
|
top_level_module = True
|
||||||
|
elif module["attr"] in ["normal", "templated", "reggen_top"]:
|
||||||
|
top_level_module = True
|
||||||
|
elif module["attr"] in ["reggen_only"]:
|
||||||
|
top_level_module = False
|
||||||
|
else:
|
||||||
|
raise ValueError('Attribute {} in {} is not valid'
|
||||||
|
.format(module['attr'], module['name']))
|
||||||
|
|
||||||
|
if module['type'] in ['rom', 'ram_1p_scr', 'eflash']:
|
||||||
|
top_level_mem = True
|
||||||
|
|
||||||
|
return top_level_mem or top_level_module
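
The module dictionaries below are hypothetical but follow the `attr`/`type` fields these predicates read:

```python
print(is_templated({'name': 'rv_plic', 'attr': 'templated'}))          # True
print(is_top_reggen({'name': 'ast', 'attr': 'reggen_only'}))           # True
print(is_inst({'name': 'ast', 'type': 'ast', 'attr': 'reggen_only'}))  # False
print(is_inst({'name': 'uart0', 'type': 'uart'}))                      # True
```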
|
||||||
|
|
||||||
|
|
||||||
|
def get_base_and_size(name_to_block: Dict[str, IpBlock],
|
||||||
|
inst: Dict[str, object],
|
||||||
|
ifname: Optional[str]) -> Tuple[int, int]:
|
||||||
|
min_device_spacing = 0x1000
|
||||||
|
|
||||||
|
block = name_to_block.get(inst['type'])
|
||||||
|
if block is None:
|
||||||
|
# If inst isn't the instantiation of a block, it came from some memory.
|
||||||
|
# Memories have their sizes defined, so we can just look it up there.
|
||||||
|
bytes_used = int(inst['size'], 0)
|
||||||
|
|
||||||
|
# Memories don't have multiple or named interfaces, so this will only
|
||||||
|
# work if ifname is None.
|
||||||
|
assert ifname is None
|
||||||
|
base_addr = inst['base_addr']
|
||||||
|
|
||||||
|
else:
|
||||||
|
# If inst is the instantiation of some block, find the register block
|
||||||
|
# that corresponds to ifname
|
||||||
|
rb = block.reg_blocks.get(ifname)
|
||||||
|
if rb is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Cannot connect to non-existent {} device interface '
|
||||||
|
'on {!r} (an instance of the {!r} block).'
|
||||||
|
.format('default' if ifname is None else repr(ifname),
|
||||||
|
inst['name'], block.name))
|
||||||
|
else:
|
||||||
|
bytes_used = 1 << rb.get_addr_width()
|
||||||
|
|
||||||
|
base_addr = inst['base_addrs'][ifname]
|
||||||
|
|
||||||
|
# If an instance has a nonempty "memory" field, take the memory
|
||||||
|
# size configuration from there.
|
||||||
|
if "memory" in inst:
|
||||||
|
if ifname in inst["memory"]:
|
||||||
|
memory_size = int(inst["memory"][ifname]["size"], 0)
|
||||||
|
if bytes_used > memory_size:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Memory region on {} device interface '
|
||||||
|
'on {!r} (an instance of the {!r} block) '
|
||||||
|
'is smaller than the corresponding register block.'
|
||||||
|
.format('default' if ifname is None else repr(ifname),
|
||||||
|
inst['name'], block.name))
|
||||||
|
|
||||||
|
bytes_used = memory_size
|
||||||
|
|
||||||
|
# Round up to min_device_spacing if necessary
|
||||||
|
size_byte = max(bytes_used, min_device_spacing)
|
||||||
|
|
||||||
|
if isinstance(base_addr, str):
|
||||||
|
base_addr = int(base_addr, 0)
|
||||||
|
else:
|
||||||
|
assert isinstance(base_addr, int)
|
||||||
|
|
||||||
|
return (base_addr, size_byte)
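
A sketch of the memory branch of `get_base_and_size()`; the instance dictionary is hypothetical, and real entries come from the validated top Hjson:

```python
inst = {
    'name': 'ram_main',
    'type': 'ram_1p_scr',      # no matching IpBlock, so treated as a memory
    'size': '0x20000',
    'base_addr': '0x10000000',
}
base, size = get_base_and_size(name_to_block={}, inst=inst, ifname=None)
print(hex(base), hex(size))    # 0x10000000 0x20000
```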
|
||||||
|
|
||||||
|
|
||||||
|
def get_io_enum_literal(sig: Dict, prefix: str) -> str:
|
||||||
|
"""Returns the DIO pin enum literal with value assignment"""
|
||||||
|
name = Name.from_snake_case(prefix) + Name.from_snake_case(sig["name"])
|
||||||
|
# In this case, the signal is a multibit signal, and hence
|
||||||
|
# we have to make the signal index part of the parameter
|
||||||
|
# name to uniquify it.
|
||||||
|
if sig['width'] > 1:
|
||||||
|
name += Name([str(sig['idx'])])
|
||||||
|
return name.as_camel_case()
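
For instance, with a hypothetical multi-bit muxed signal entry:

```python
sig = {'name': 'spi_dev_sd', 'width': 4, 'idx': 2}
print(get_io_enum_literal(sig, 'top_earlgrey_dio_pin'))
# TopEarlgreyDioPinSpiDevSd2
```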
|
||||||
|
|
||||||
|
|
||||||
|
def make_bit_concatenation(sig_name: str,
|
||||||
|
indices: List[int],
|
||||||
|
end_indent: int) -> str:
|
||||||
|
'''Return SV code for concatenating certain indices from a signal
|
||||||
|
|
||||||
|
sig_name is the name of the signal and indices is a non-empty list of the
|
||||||
|
indices to use, MSB first. So
|
||||||
|
|
||||||
|
make_bit_concatenation("foo", [0, 100, 20])
|
||||||
|
|
||||||
|
should give
|
||||||
|
|
||||||
|
{foo[0], foo[100], foo[20]}
|
||||||
|
|
||||||
|
Adjacent bits turn into a range select. For example:
|
||||||
|
|
||||||
|
make_bit_concatenation("foo", [0, 1, 2])
|
||||||
|
|
||||||
|
should give
|
||||||
|
|
||||||
|
foo[0:2]
|
||||||
|
|
||||||
|
If there are multiple ranges, they are printed one to a line. end_indent
|
||||||
|
gives the indentation of the closing brace and the range selects in between
|
||||||
|
get indented to end_indent + 2.
|
||||||
|
|
||||||
|
'''
|
||||||
|
assert 0 <= end_indent
|
||||||
|
|
||||||
|
ranges = []
|
||||||
|
cur_range_start = indices[0]
|
||||||
|
cur_range_end = indices[0]
|
||||||
|
for idx in indices[1:]:
|
||||||
|
if idx == cur_range_end + 1 and cur_range_start <= cur_range_end:
|
||||||
|
cur_range_end += 1
|
||||||
|
continue
|
||||||
|
if idx == cur_range_end - 1 and cur_range_start >= cur_range_end:
|
||||||
|
cur_range_end -= 1
|
||||||
|
continue
|
||||||
|
ranges.append((cur_range_start, cur_range_end))
|
||||||
|
cur_range_start = idx
|
||||||
|
cur_range_end = idx
|
||||||
|
ranges.append((cur_range_start, cur_range_end))
|
||||||
|
|
||||||
|
items = []
|
||||||
|
for range_start, range_end in ranges:
|
||||||
|
if range_start == range_end:
|
||||||
|
select = str(range_start)
|
||||||
|
else:
|
||||||
|
select = '{}:{}'.format(range_start, range_end)
|
||||||
|
items.append('{}[{}]'.format(sig_name, select))
|
||||||
|
|
||||||
|
if len(items) == 1:
|
||||||
|
return items[0]
|
||||||
|
|
||||||
|
item_indent = '\n' + (' ' * (end_indent + 2))
|
||||||
|
|
||||||
|
acc = ['{', item_indent, items[0]]
|
||||||
|
for item in items[1:]:
|
||||||
|
acc += [',', item_indent, item]
|
||||||
|
acc += ['\n', ' ' * end_indent, '}']
|
||||||
|
return ''.join(acc)
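
Illustrative calls (signal name invented, assuming `make_bit_concatenation` as defined above):

```python
print(make_bit_concatenation("foo", [0, 1, 2], 0))   # foo[0:2]
print(make_bit_concatenation("foo", [4, 3, 2], 0))   # foo[4:2]
print(make_bit_concatenation("foo", [7, 0], 0))      # "{\n  foo[7],\n  foo[0]\n}"
```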
|
||||||
|
|
||||||
|
|
||||||
|
def is_rom_ctrl(modules):
|
||||||
|
'''Return true if rom_ctrl (and thus boot-up rom integrity checking)
|
||||||
|
exists in the design
|
||||||
|
'''
|
||||||
|
for m in modules:
|
||||||
|
if m['type'] == 'rom_ctrl':
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
File diff suppressed because it is too large
|
@ -0,0 +1,182 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
from typing import Dict, Optional
|
||||||
|
from .clocks import Clocks
|
||||||
|
|
||||||
|
|
||||||
|
class ResetItem:
|
||||||
|
'''Individual resets'''
|
||||||
|
def __init__(self, hier: Dict[str, str], raw: Dict[str, object], clocks: Clocks):
|
||||||
|
if not raw['name']:
|
||||||
|
raise ValueError('Reset has no name')
|
||||||
|
|
||||||
|
self.name = raw['name']
|
||||||
|
self.gen = raw.get('gen', True)
|
||||||
|
self.rst_type = raw.get('type', 'top')
|
||||||
|
|
||||||
|
self.path = ""
|
||||||
|
if self.rst_type == 'top':
|
||||||
|
self.path = f"{hier['top']}rst_{self.name}_n"
|
||||||
|
elif self.rst_type == 'ext':
|
||||||
|
self.path = f"{hier['ext']}{self.name}"
|
||||||
|
|
||||||
|
# to be constructed later
|
||||||
|
self.domains = []
|
||||||
|
self.shadowed = False
|
||||||
|
|
||||||
|
self.parent = raw.get('parent', "")
|
||||||
|
|
||||||
|
# This can be a source clock or a derived source
|
||||||
|
if self.rst_type != 'ext':
|
||||||
|
self.clock = clocks.get_clock_by_name(raw['clk'])
|
||||||
|
else:
|
||||||
|
self.clock = None
|
||||||
|
|
||||||
|
self.sw = bool(raw.get('sw', 0))
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
ret = {
|
||||||
|
'name': self.name,
|
||||||
|
'gen': self.gen,
|
||||||
|
'type': self.rst_type,
|
||||||
|
'domains': self.domains,
|
||||||
|
'shadowed': self.shadowed,
|
||||||
|
'sw': self.sw,
|
||||||
|
'path': self.path
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.parent:
|
||||||
|
ret['parent'] = self.parent
|
||||||
|
|
||||||
|
if self.clock:
|
||||||
|
ret['clock'] = self.clock.name
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
class Resets:
|
||||||
|
'''Resets for the chip'''
|
||||||
|
def __init__(self, raw: Dict[str, object], clocks: Clocks):
|
||||||
|
self.hier_paths = {}
|
||||||
|
assert isinstance(raw['hier_paths'], dict)
|
||||||
|
for rst_src, path in raw['hier_paths'].items():
|
||||||
|
self.hier_paths[str(rst_src)] = str(path)
|
||||||
|
|
||||||
|
assert isinstance(raw['nodes'], list)
|
||||||
|
|
||||||
|
self.nodes = {}
|
||||||
|
for node in raw['nodes']:
|
||||||
|
assert isinstance(node, dict)
|
||||||
|
reset = ResetItem(self.hier_paths, node, clocks)
|
||||||
|
self.nodes[reset.name] = reset
|
||||||
|
|
||||||
|
def _asdict(self) -> Dict[str, object]:
|
||||||
|
ret = {
|
||||||
|
'hier_paths': self.hier_paths,
|
||||||
|
'nodes': list(self.nodes.values())
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def get_reset_by_name(self, name: str) -> ResetItem:
|
||||||
|
|
||||||
|
ret = self.nodes.get(name, None)
|
||||||
|
if ret:
|
||||||
|
return ret
|
||||||
|
else:
|
||||||
|
raise ValueError(f'{name} is not a defined reset')
|
||||||
|
|
||||||
|
def mark_reset_shadowed(self, name: str):
|
||||||
|
'''Mark particular reset as requiring shadow'''
|
||||||
|
|
||||||
|
reset = self.get_reset_by_name(name)
|
||||||
|
reset.shadowed = True
|
||||||
|
|
||||||
|
def get_reset_domains(self, name: str):
|
||||||
|
'''Get available domains for a reset'''
|
||||||
|
|
||||||
|
return self.get_reset_by_name(name).domains
|
||||||
|
|
||||||
|
def get_clocks(self) -> list:
|
||||||
|
'''Get associated clocks'''
|
||||||
|
|
||||||
|
clocks = {}
|
||||||
|
for reset in self.nodes.values():
|
||||||
|
if reset.rst_type != 'ext':
|
||||||
|
clocks[reset.clock.name] = 1
|
||||||
|
|
||||||
|
return clocks.keys()
|
||||||
|
|
||||||
|
def get_generated_resets(self) -> Dict[str, object]:
|
||||||
|
'''Get generated resets and return a list of dicts
|
||||||
|
with the related clock
|
||||||
|
'''
|
||||||
|
|
||||||
|
ret = []
|
||||||
|
for reset in self.nodes.values():
|
||||||
|
if reset.gen:
|
||||||
|
entry = {}
|
||||||
|
entry['name'] = reset.name
|
||||||
|
entry['clk'] = reset.clock.name
|
||||||
|
entry['parent'] = reset.parent
|
||||||
|
entry['sw'] = reset.sw
|
||||||
|
ret.append(entry)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def get_top_resets(self) -> list:
|
||||||
|
'''Get resets pushed to the top level'''
|
||||||
|
|
||||||
|
return [reset.name
|
||||||
|
for reset in self.nodes.values()
|
||||||
|
if reset.rst_type == 'top']
|
||||||
|
|
||||||
|
def get_sw_resets(self) -> list:
|
||||||
|
'''Get software controlled resets'''
|
||||||
|
|
||||||
|
return [reset.name
|
||||||
|
for reset in self.nodes.values()
|
||||||
|
if reset.sw]
|
||||||
|
|
||||||
|
def get_path(self, name: str, domain: Optional[str]) -> str:
|
||||||
|
'''Get path to reset'''
|
||||||
|
|
||||||
|
reset = self.get_reset_by_name(name)
|
||||||
|
if reset.rst_type == 'int':
|
||||||
|
raise ValueError(f'Reset {name} is not a reset exported from rstmgr')
|
||||||
|
|
||||||
|
if reset.rst_type == 'ext':
|
||||||
|
return reset.path
|
||||||
|
|
||||||
|
# if a generated reset
|
||||||
|
if domain:
|
||||||
|
return f'{reset.path}[rstmgr_pkg::Domain{domain}Sel]'
|
||||||
|
else:
|
||||||
|
return reset.path
|
||||||
|
|
||||||
|
def get_unused_resets(self, domains: list) -> Dict[str, str]:
|
||||||
|
'''Get unused resets'''
|
||||||
|
|
||||||
|
top_resets = [reset
|
||||||
|
for reset in self.nodes.values()
|
||||||
|
if reset.rst_type == 'top']
|
||||||
|
|
||||||
|
ret = {}
|
||||||
|
for reset in top_resets:
|
||||||
|
for dom in domains:
|
||||||
|
if dom not in reset.domains:
|
||||||
|
ret[reset.name] = dom
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def add_reset_domain(self, name: str, domain: str):
|
||||||
|
'''Add a reset domain to a particular reset'''
|
||||||
|
|
||||||
|
reset = self.get_reset_by_name(name)
|
||||||
|
|
||||||
|
# Other reset types have hardwired domains
|
||||||
|
if reset.rst_type == 'top':
|
||||||
|
if domain not in reset.domains:
|
||||||
|
reset.domains.append(domain)
|
|
@ -0,0 +1,4 @@
|
||||||
|
# OpenTitan topgen templates
|
||||||
|
|
||||||
|
This directory contains templates used by topgen to assemble a chip toplevel.
|
||||||
|
|
|
@ -0,0 +1,17 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
// Generated by topgen.py
|
||||||
|
|
||||||
|
parameter string LIST_OF_ALERTS[] = {
|
||||||
|
% for alert in top["alert"]:
|
||||||
|
% if loop.last:
|
||||||
|
"${alert["name"]}"
|
||||||
|
% else:
|
||||||
|
"${alert["name"]}",
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
};
|
||||||
|
|
||||||
|
parameter uint NUM_ALERTS = ${len(top["alert"])};
|
File diff suppressed because it is too large
|
@ -0,0 +1,4 @@
|
||||||
|
# This disables clang-format on all files in the sw/autogen directory.
|
||||||
|
# This is needed so that git-clang-format and similar scripts work.
|
||||||
|
DisableFormat: true
|
||||||
|
SortIncludes: false
|
|
@ -0,0 +1,21 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
//
|
||||||
|
// tb__alert_handler_connect.sv is auto-generated by `topgen.py` tool
|
||||||
|
|
||||||
|
<%
|
||||||
|
index = 0
|
||||||
|
module_name = ""
|
||||||
|
%>\
|
||||||
|
% for alert in top["alert"]:
|
||||||
|
% if alert["module_name"] == module_name:
|
||||||
|
<% index = index + 1 %>\
|
||||||
|
% else:
|
||||||
|
<%
|
||||||
|
module_name = alert["module_name"]
|
||||||
|
index = 0
|
||||||
|
%>\
|
||||||
|
% endif
|
||||||
|
assign alert_if[${loop.index}].alert_tx = `CHIP_HIER.u_${module_name}.alert_tx_o[${index}];
|
||||||
|
% endfor
|
|
@ -0,0 +1,123 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
//
|
||||||
|
// tb__xbar_connect generated by `topgen.py` tool
|
||||||
|
<%
|
||||||
|
from collections import OrderedDict
|
||||||
|
import topgen.lib as lib
|
||||||
|
|
||||||
|
top_hier = 'tb.dut.top_' + top["name"] + '.'
|
||||||
|
clk_hier = top_hier + top["clocks"].hier_paths["top"]
|
||||||
|
|
||||||
|
clk_src = OrderedDict()
|
||||||
|
for xbar in top["xbar"]:
|
||||||
|
for clk, src in xbar["clock_srcs"].items():
|
||||||
|
clk_src[clk] = src
|
||||||
|
|
||||||
|
clk_freq = OrderedDict()
|
||||||
|
for clock in top["clocks"].all_srcs.values():
|
||||||
|
if clock.name in clk_src.values():
|
||||||
|
clk_freq[clock.name] = clock.freq
|
||||||
|
|
||||||
|
hosts = OrderedDict()
|
||||||
|
devices = OrderedDict()
|
||||||
|
for xbar in top["xbar"]:
|
||||||
|
for node in xbar["nodes"]:
|
||||||
|
if node["type"] == "host" and not node["xbar"]:
|
||||||
|
hosts[node["name"]] = "clk_" + clk_src[node["clock"]]
|
||||||
|
elif node["type"] == "device" and not node["xbar"]:
|
||||||
|
devices[node["name"]] = "clk_" + clk_src[node["clock"]]
|
||||||
|
|
||||||
|
def escape_if_name(qual_if_name):
|
||||||
|
return qual_if_name.replace('.', '__')
|
||||||
|
|
||||||
|
%>\
|
||||||
|
<%text>
|
||||||
|
`define DRIVE_CHIP_TL_HOST_IF(tl_name, inst_name, sig_name) \
|
||||||
|
force ``tl_name``_tl_if.d2h = dut.top_earlgrey.u_``inst_name``.``sig_name``_i; \
|
||||||
|
force dut.top_earlgrey.u_``inst_name``.``sig_name``_o = ``tl_name``_tl_if.h2d; \
|
||||||
|
force dut.top_earlgrey.u_``inst_name``.clk_i = 0; \
|
||||||
|
uvm_config_db#(virtual tl_if)::set(null, $sformatf("*env.%0s_agent", `"tl_name`"), "vif", \
|
||||||
|
``tl_name``_tl_if);
|
||||||
|
|
||||||
|
`define DRIVE_CHIP_TL_DEVICE_IF(tl_name, inst_name, sig_name) \
|
||||||
|
force ``tl_name``_tl_if.h2d = dut.top_earlgrey.u_``inst_name``.``sig_name``_i; \
|
||||||
|
force dut.top_earlgrey.u_``inst_name``.``sig_name``_o = ``tl_name``_tl_if.d2h; \
|
||||||
|
force dut.top_earlgrey.u_``inst_name``.clk_i = 0; \
|
||||||
|
uvm_config_db#(virtual tl_if)::set(null, $sformatf("*env.%0s_agent", `"tl_name`"), "vif", \
|
||||||
|
``tl_name``_tl_if);
|
||||||
|
|
||||||
|
`define DRIVE_CHIP_TL_EXT_DEVICE_IF(tl_name, port_name) \
|
||||||
|
force ``tl_name``_tl_if.h2d = dut.top_earlgrey.``port_name``_req_o; \
|
||||||
|
force dut.top_earlgrey.``port_name``_rsp_i = ``tl_name``_tl_if.d2h; \
|
||||||
|
uvm_config_db#(virtual tl_if)::set(null, $sformatf("*env.%0s_agent", `"tl_name`"), "vif", \
|
||||||
|
``tl_name``_tl_if);
|
||||||
|
</%text>\
|
||||||
|
|
||||||
|
% for c in clk_freq.keys():
|
||||||
|
wire clk_${c};
|
||||||
|
clk_rst_if clk_rst_if_${c}(.clk(clk_${c}), .rst_n(rst_n));
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% for i, clk in hosts.items():
|
||||||
|
tl_if ${escape_if_name(i)}_tl_if(${clk}, rst_n);
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% for i, clk in devices.items():
|
||||||
|
tl_if ${escape_if_name(i)}_tl_if(${clk}, rst_n);
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
initial begin
|
||||||
|
bit xbar_mode;
|
||||||
|
void'($value$plusargs("xbar_mode=%0b", xbar_mode));
|
||||||
|
if (xbar_mode) begin
|
||||||
|
// only enable assertions in xbar as many pins are unconnected
|
||||||
|
$assertoff(0, tb);
|
||||||
|
% for xbar in top["xbar"]:
|
||||||
|
$asserton(0, tb.dut.top_${top["name"]}.u_xbar_${xbar["name"]});
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% for c in clk_freq.keys():
|
||||||
|
clk_rst_if_${c}.set_active(.drive_rst_n_val(0));
|
||||||
|
clk_rst_if_${c}.set_freq_khz(${clk_freq[c]} / 1000);
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// bypass clkmgr, force clocks directly
|
||||||
|
% for xbar in top["xbar"]:
|
||||||
|
% for clk, src in xbar["clock_srcs"].items():
|
||||||
|
force ${top_hier}u_xbar_${xbar["name"]}.${clk} = clk_${src};
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// bypass rstmgr, force resets directly
|
||||||
|
% for xbar in top["xbar"]:
|
||||||
|
% for rst in xbar["reset_connections"]:
|
||||||
|
force ${top_hier}u_xbar_${xbar["name"]}.${rst} = rst_n;
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% for xbar in top["xbar"]:
|
||||||
|
% for node in xbar["nodes"]:
|
||||||
|
<%
|
||||||
|
clk = 'clk_' + clk_src[node["clock"]]
|
||||||
|
esc_name = node['name'].replace('.', '__')
|
||||||
|
inst_sig_list = lib.find_otherside_modules(top, xbar["name"], 'tl_' + esc_name)
|
||||||
|
inst_name = inst_sig_list[0][1]
|
||||||
|
sig_name = inst_sig_list[0][2]
|
||||||
|
%>\
|
||||||
|
% if node["type"] == "host" and not node["xbar"]:
|
||||||
|
`DRIVE_CHIP_TL_HOST_IF(${esc_name}, ${inst_name}, ${sig_name})
|
||||||
|
% elif node["type"] == "device" and not node["xbar"] and node["stub"]:
|
||||||
|
`DRIVE_CHIP_TL_EXT_DEVICE_IF(${esc_name}, ${inst_name}_${sig_name})
|
||||||
|
% elif node["type"] == "device" and not node["xbar"]:
|
||||||
|
`DRIVE_CHIP_TL_DEVICE_IF(${esc_name}, ${inst_name}, ${sig_name})
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
`undef DRIVE_CHIP_TL_HOST_IF
|
||||||
|
`undef DRIVE_CHIP_TL_DEVICE_IF
|
||||||
|
`undef DRIVE_CHIP_TL_EXT_DEVICE_IF
|
|
@ -0,0 +1,21 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
#include "${helper.header_path}"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PLIC Interrupt Source to Peripheral Map
|
||||||
|
*
|
||||||
|
* This array is a mapping from `${helper.plic_interrupts.name.as_c_type()}` to
|
||||||
|
* `${helper.plic_sources.name.as_c_type()}`.
|
||||||
|
*/
|
||||||
|
${helper.plic_mapping.render_definition()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Alert Handler Alert Source to Peripheral Map
|
||||||
|
*
|
||||||
|
* This array is a mapping from `${helper.alert_alerts.name.as_c_type()}` to
|
||||||
|
* `${helper.alert_sources.name.as_c_type()}`.
|
||||||
|
*/
|
||||||
|
${helper.alert_mapping.render_definition()}
|
|
@ -0,0 +1,201 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
#ifndef _TOP_${top["name"].upper()}_H_
|
||||||
|
#define _TOP_${top["name"].upper()}_H_
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @file
|
||||||
|
* @brief Top-specific Definitions
|
||||||
|
*
|
||||||
|
* This file contains preprocessor and type definitions for use within the
|
||||||
|
* device C/C++ codebase.
|
||||||
|
*
|
||||||
|
* These definitions are for information that depends on the top-specific chip
|
||||||
|
* configuration, which includes:
|
||||||
|
* - Device Memory Information (for Peripherals and Memory)
|
||||||
|
* - PLIC Interrupt ID Names and Source Mappings
|
||||||
|
* - Alert ID Names and Source Mappings
|
||||||
|
* - Pinmux Pin/Select Names
|
||||||
|
* - Power Manager Wakeups
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifdef __cplusplus
|
||||||
|
extern "C" {
|
||||||
|
#endif
|
||||||
|
|
||||||
|
% for (inst_name, if_name), region in helper.devices():
|
||||||
|
<%
|
||||||
|
if_desc = inst_name if if_name is None else '{} device on {}'.format(if_name, inst_name)
|
||||||
|
hex_base_addr = "0x{:X}u".format(region.base_addr)
|
||||||
|
hex_size_bytes = "0x{:X}u".format(region.size_bytes)
|
||||||
|
|
||||||
|
base_addr_name = region.base_addr_name().as_c_define()
|
||||||
|
size_bytes_name = region.size_bytes_name().as_c_define()
|
||||||
|
|
||||||
|
%>\
|
||||||
|
/**
|
||||||
|
* Peripheral base address for ${if_desc} in top ${top["name"]}.
|
||||||
|
*
|
||||||
|
* This should be used with #mmio_region_from_addr to access the memory-mapped
|
||||||
|
* registers associated with the peripheral (usually via a DIF).
|
||||||
|
*/
|
||||||
|
#define ${base_addr_name} ${hex_base_addr}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Peripheral size for ${if_desc} in top ${top["name"]}.
|
||||||
|
*
|
||||||
|
* This is the size (in bytes) of the peripheral's reserved memory area. All
|
||||||
|
* memory-mapped registers associated with this peripheral should have an
|
||||||
|
* address between #${base_addr_name} and
|
||||||
|
* `${base_addr_name} + ${size_bytes_name}`.
|
||||||
|
*/
|
||||||
|
#define ${size_bytes_name} ${hex_size_bytes}
|
||||||
|
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% for name, region in helper.memories():
|
||||||
|
<%
|
||||||
|
hex_base_addr = "0x{:X}u".format(region.base_addr)
|
||||||
|
hex_size_bytes = "0x{:X}u".format(region.size_bytes)
|
||||||
|
|
||||||
|
base_addr_name = region.base_addr_name().as_c_define()
|
||||||
|
size_bytes_name = region.size_bytes_name().as_c_define()
|
||||||
|
|
||||||
|
%>\
|
||||||
|
/**
|
||||||
|
* Memory base address for ${name} in top ${top["name"]}.
|
||||||
|
*/
|
||||||
|
#define ${base_addr_name} ${hex_base_addr}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Memory size for ${name} in top ${top["name"]}.
|
||||||
|
*/
|
||||||
|
#define ${size_bytes_name} ${hex_size_bytes}
|
||||||
|
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PLIC Interrupt Source Peripheral.
|
||||||
|
*
|
||||||
|
* Enumeration used to determine which peripheral asserted the corresponding
|
||||||
|
* interrupt.
|
||||||
|
*/
|
||||||
|
${helper.plic_sources.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PLIC Interrupt Source.
|
||||||
|
*
|
||||||
|
* Enumeration of all PLIC interrupt sources. The interrupt sources belonging to
|
||||||
|
* the same peripheral are guaranteed to be consecutive.
|
||||||
|
*/
|
||||||
|
${helper.plic_interrupts.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PLIC Interrupt Source to Peripheral Map
|
||||||
|
*
|
||||||
|
* This array is a mapping from `${helper.plic_interrupts.name.as_c_type()}` to
|
||||||
|
* `${helper.plic_sources.name.as_c_type()}`.
|
||||||
|
*/
|
||||||
|
${helper.plic_mapping.render_declaration()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PLIC Interrupt Target.
|
||||||
|
*
|
||||||
|
* Enumeration used to determine which set of IE, CC, threshold registers to
|
||||||
|
* access for a given interrupt target.
|
||||||
|
*/
|
||||||
|
${helper.plic_targets.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Alert Handler Source Peripheral.
|
||||||
|
*
|
||||||
|
* Enumeration used to determine which peripheral asserted the corresponding
|
||||||
|
* alert.
|
||||||
|
*/
|
||||||
|
${helper.alert_sources.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Alert Handler Alert Source.
|
||||||
|
*
|
||||||
|
* Enumeration of all Alert Handler Alert Sources. The alert sources belonging to
|
||||||
|
* the same peripheral are guaranteed to be consecutive.
|
||||||
|
*/
|
||||||
|
${helper.alert_alerts.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Alert Handler Alert Source to Peripheral Map
|
||||||
|
*
|
||||||
|
* This array is a mapping from `${helper.alert_alerts.name.as_c_type()}` to
|
||||||
|
* `${helper.alert_sources.name.as_c_type()}`.
|
||||||
|
*/
|
||||||
|
${helper.alert_mapping.render_declaration()}
|
||||||
|
|
||||||
|
#define PINMUX_MIO_PERIPH_INSEL_IDX_OFFSET 2
|
||||||
|
|
||||||
|
// PERIPH_INSEL ranges from 0 to NUM_MIO_PADS + 2 - 1
|
||||||
|
// 0 and 1 are tied to value 0 and 1
|
||||||
|
#define NUM_MIO_PADS ${top["pinmux"]["io_counts"]["muxed"]["pads"]}
|
||||||
|
#define NUM_DIO_PADS ${top["pinmux"]["io_counts"]["dedicated"]["inouts"] + \
|
||||||
|
top["pinmux"]["io_counts"]["dedicated"]["inputs"] + \
|
||||||
|
top["pinmux"]["io_counts"]["dedicated"]["outputs"] }
|
||||||
|
|
||||||
|
#define PINMUX_PERIPH_OUTSEL_IDX_OFFSET 3
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pinmux Peripheral Input.
|
||||||
|
*/
|
||||||
|
${helper.pinmux_peripheral_in.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pinmux MIO Input Selector.
|
||||||
|
*/
|
||||||
|
${helper.pinmux_insel.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pinmux MIO Output.
|
||||||
|
*/
|
||||||
|
${helper.pinmux_mio_out.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pinmux Peripheral Output Selector.
|
||||||
|
*/
|
||||||
|
${helper.pinmux_outsel.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Power Manager Wakeup Signals
|
||||||
|
*/
|
||||||
|
${helper.pwrmgr_wakeups.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reset Manager Software Controlled Resets
|
||||||
|
*/
|
||||||
|
${helper.rstmgr_sw_rsts.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Power Manager Reset Request Signals
|
||||||
|
*/
|
||||||
|
${helper.pwrmgr_reset_requests.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clock Manager Software-Controlled ("Gated") Clocks.
|
||||||
|
*
|
||||||
|
* The Software has full control over these clocks.
|
||||||
|
*/
|
||||||
|
${helper.clkmgr_gateable_clocks.render()}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clock Manager Software-Hinted Clocks.
|
||||||
|
*
|
||||||
|
* The Software has partial control over these clocks. It can ask them to stop,
|
||||||
|
* but the clock manager is in control of whether the clock actually is stopped.
|
||||||
|
*/
|
||||||
|
${helper.clkmgr_hintable_clocks.render()}
|
||||||
|
|
||||||
|
// Header Extern Guard
|
||||||
|
#ifdef __cplusplus
|
||||||
|
} // extern "C"
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#endif // _TOP_${top["name"].upper()}_H_
|
|
@ -0,0 +1,774 @@
|
||||||
|
// Copyright lowRISC contributors.
|
||||||
|
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
${gencmd}
|
||||||
|
<%
|
||||||
|
import re
|
||||||
|
import topgen.lib as lib
|
||||||
|
|
||||||
|
num_mio_inputs = top['pinmux']['io_counts']['muxed']['inouts'] + \
|
||||||
|
top['pinmux']['io_counts']['muxed']['inputs']
|
||||||
|
num_mio_outputs = top['pinmux']['io_counts']['muxed']['inouts'] + \
|
||||||
|
top['pinmux']['io_counts']['muxed']['outputs']
|
||||||
|
num_mio_pads = top['pinmux']['io_counts']['muxed']['pads']
|
||||||
|
|
||||||
|
num_dio_inputs = top['pinmux']['io_counts']['dedicated']['inouts'] + \
|
||||||
|
top['pinmux']['io_counts']['dedicated']['inputs']
|
||||||
|
num_dio_outputs = top['pinmux']['io_counts']['dedicated']['inouts'] + \
|
||||||
|
top['pinmux']['io_counts']['dedicated']['outputs']
|
||||||
|
num_dio_total = top['pinmux']['io_counts']['dedicated']['inouts'] + \
|
||||||
|
top['pinmux']['io_counts']['dedicated']['inputs'] + \
|
||||||
|
top['pinmux']['io_counts']['dedicated']['outputs']
|
||||||
|
|
||||||
|
num_im = sum([x["width"] if "width" in x else 1 for x in top["inter_signal"]["external"]])
|
||||||
|
|
||||||
|
max_sigwidth = max([x["width"] if "width" in x else 1 for x in top["pinmux"]["ios"]])
|
||||||
|
max_sigwidth = len("{}".format(max_sigwidth))
|
||||||
|
|
||||||
|
cpu_clk = top['clocks'].hier_paths['top'] + "clk_proc_main"
|
||||||
|
|
||||||
|
unused_resets = lib.get_unused_resets(top)
|
||||||
|
unused_im_defs, undriven_im_defs = lib.get_dangling_im_def(top["inter_signal"]["definitions"])
|
||||||
|
|
||||||
|
has_toplevel_rom = False
|
||||||
|
for m in top['memory']:
|
||||||
|
if m['type'] == 'rom':
|
||||||
|
has_toplevel_rom = True
|
||||||
|
|
||||||
|
%>\
|
||||||
|
module top_${top["name"]} #(
|
||||||
|
// Manually defined parameters
|
||||||
|
% if not lib.is_rom_ctrl(top["module"]):
|
||||||
|
parameter BootRomInitFile = "",
|
||||||
|
% endif
|
||||||
|
|
||||||
|
// Auto-inferred parameters
|
||||||
|
% for m in top["module"]:
|
||||||
|
% if not lib.is_inst(m):
|
||||||
|
<% continue %>
|
||||||
|
% endif
|
||||||
|
// parameters for ${m['name']}
|
||||||
|
% for p_exp in [p for p in m["param_list"] if p.get("expose") == "true" ]:
|
||||||
|
<%
|
||||||
|
p_type = p_exp.get('type')
|
||||||
|
p_type_word = p_type + ' ' if p_type else ''
|
||||||
|
|
||||||
|
p_lhs = f'{p_type_word}{p_exp["name_top"]}'
|
||||||
|
p_rhs = p_exp['default']
|
||||||
|
%>\
|
||||||
|
% if 12 + len(p_lhs) + 3 + len(p_rhs) + 1 < 100:
|
||||||
|
parameter ${p_lhs} = ${p_rhs}${"" if loop.parent.last & loop.last else ","}
|
||||||
|
% else:
|
||||||
|
parameter ${p_lhs} =
|
||||||
|
${p_rhs}${"" if loop.parent.last & loop.last else ","}
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
) (
|
||||||
|
// Reset, clocks defined as part of intermodule
|
||||||
|
input rst_ni,
|
||||||
|
|
||||||
|
% if num_mio_pads != 0:
|
||||||
|
// Multiplexed I/O
|
||||||
|
input ${lib.bitarray(num_mio_pads, max_sigwidth)} mio_in_i,
|
||||||
|
output logic ${lib.bitarray(num_mio_pads, max_sigwidth)} mio_out_o,
|
||||||
|
output logic ${lib.bitarray(num_mio_pads, max_sigwidth)} mio_oe_o,
|
||||||
|
% endif
|
||||||
|
% if num_dio_total != 0:
|
||||||
|
// Dedicated I/O
|
||||||
|
input ${lib.bitarray(num_dio_total, max_sigwidth)} dio_in_i,
|
||||||
|
output logic ${lib.bitarray(num_dio_total, max_sigwidth)} dio_out_o,
|
||||||
|
output logic ${lib.bitarray(num_dio_total, max_sigwidth)} dio_oe_o,
|
||||||
|
% endif
|
||||||
|
|
||||||
|
% if "pinmux" in top:
|
||||||
|
// pad attributes to padring
|
||||||
|
output prim_pad_wrapper_pkg::pad_attr_t [pinmux_reg_pkg::NMioPads-1:0] mio_attr_o,
|
||||||
|
output prim_pad_wrapper_pkg::pad_attr_t [pinmux_reg_pkg::NDioPads-1:0] dio_attr_o,
|
||||||
|
% endif
|
||||||
|
|
||||||
|
% if num_im != 0:
|
||||||
|
|
||||||
|
// Inter-module Signal External type
|
||||||
|
% for sig in top["inter_signal"]["external"]:
|
||||||
|
${"input " if sig["direction"] == "in" else "output"} ${lib.im_defname(sig)} ${lib.bitarray(sig["width"],1)} ${sig["signame"]},
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// Flash specific voltages
|
||||||
|
inout [1:0] flash_test_mode_a_io,
|
||||||
|
inout flash_test_voltage_h_io,
|
||||||
|
|
||||||
|
// OTP specific voltages
|
||||||
|
inout otp_ext_voltage_h_io,
|
||||||
|
|
||||||
|
% endif
|
||||||
|
input scan_rst_ni, // reset used for test mode
|
||||||
|
input scan_en_i,
|
||||||
|
input lc_ctrl_pkg::lc_tx_t scanmode_i // lc_ctrl_pkg::On for Scan
|
||||||
|
);
|
||||||
|
|
||||||
|
// JTAG IDCODE for development versions of this code.
|
||||||
|
// Manufacturers of OpenTitan chips must replace this code with one of their
|
||||||
|
// own IDs.
|
||||||
|
// Field structure as defined in the IEEE 1149.1 (JTAG) specification,
|
||||||
|
// section 12.1.1.
|
||||||
|
localparam logic [31:0] JTAG_IDCODE = {
|
||||||
|
4'h0, // Version
|
||||||
|
16'h4F54, // Part Number: "OT"
|
||||||
|
11'h426, // Manufacturer Identity: Google
|
||||||
|
1'b1 // (fixed)
|
||||||
|
};
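The IDCODE concatenation above packs, from MSB to LSB, a 4-bit version, a 16-bit part number, an 11-bit manufacturer identity and a fixed 1 bit. A quick check of the resulting constant (illustrative only):

# Sketch: pack the JTAG IDCODE fields exactly as the localparam concatenation does.
version      = 0x0     # 4 bits
part_number  = 0x4F54  # 16 bits, ASCII "OT"
manufacturer = 0x426   # 11 bits, manufacturer identity (Google, per the comment above)
fixed        = 0x1     # 1 bit, always 1 per IEEE 1149.1

idcode = (version << 28) | (part_number << 12) | (manufacturer << 1) | fixed
print(hex(idcode))  # 0x4f5484d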
|
||||||
|
|
||||||
|
import tlul_pkg::*;
|
||||||
|
import top_pkg::*;
|
||||||
|
import tl_main_pkg::*;
|
||||||
|
import top_${top["name"]}_pkg::*;
|
||||||
|
// Compile-time random constants
|
||||||
|
import top_${top["name"]}_rnd_cnst_pkg::*;
|
||||||
|
|
||||||
|
// Signals
|
||||||
|
logic [${num_mio_inputs - 1}:0] mio_p2d;
|
||||||
|
logic [${num_mio_outputs - 1}:0] mio_d2p;
|
||||||
|
logic [${num_mio_outputs - 1}:0] mio_en_d2p;
|
||||||
|
logic [${num_dio_total - 1}:0] dio_p2d;
|
||||||
|
logic [${num_dio_total - 1}:0] dio_d2p;
|
||||||
|
logic [${num_dio_total - 1}:0] dio_en_d2p;
|
||||||
|
% for m in top["module"]:
|
||||||
|
% if not lib.is_inst(m):
|
||||||
|
<% continue %>
|
||||||
|
% endif
|
||||||
|
<%
|
||||||
|
block = name_to_block[m['type']]
|
||||||
|
inouts, inputs, outputs = block.xputs
|
||||||
|
%>\
|
||||||
|
// ${m["name"]}
|
||||||
|
% for p_in in inputs + inouts:
|
||||||
|
logic ${lib.bitarray(p_in.bits.width(), max_sigwidth)} cio_${m["name"]}_${p_in.name}_p2d;
|
||||||
|
% endfor
|
||||||
|
% for p_out in outputs + inouts:
|
||||||
|
logic ${lib.bitarray(p_out.bits.width(), max_sigwidth)} cio_${m["name"]}_${p_out.name}_d2p;
|
||||||
|
logic ${lib.bitarray(p_out.bits.width(), max_sigwidth)} cio_${m["name"]}_${p_out.name}_en_d2p;
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
|
||||||
|
<%
|
||||||
|
# Interrupt source 0 is tied to 0 to conform RISC-V PLIC spec.
|
||||||
|
# So, total number of interrupts are the number of entries in the list + 1
|
||||||
|
interrupt_num = sum([x["width"] if "width" in x else 1 for x in top["interrupt"]]) + 1
|
||||||
|
%>\
|
||||||
|
logic [${interrupt_num-1}:0] intr_vector;
|
||||||
|
// Interrupt source list
|
||||||
|
% for m in top["module"]:
|
||||||
|
<%
|
||||||
|
block = name_to_block[m['type']]
|
||||||
|
%>\
|
||||||
|
% if not lib.is_inst(m):
|
||||||
|
<% continue %>
|
||||||
|
% endif
|
||||||
|
% for intr in block.interrupts:
|
||||||
|
% if intr.bits.width() != 1:
|
||||||
|
logic [${intr.bits.width()-1}:0] intr_${m["name"]}_${intr.name};
|
||||||
|
% else:
|
||||||
|
logic intr_${m["name"]}_${intr.name};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// Alert list
|
||||||
|
prim_alert_pkg::alert_tx_t [alert_pkg::NAlerts-1:0] alert_tx;
|
||||||
|
prim_alert_pkg::alert_rx_t [alert_pkg::NAlerts-1:0] alert_rx;
|
||||||
|
|
||||||
|
% if not top["alert"]:
|
||||||
|
for (genvar k = 0; k < alert_pkg::NAlerts; k++) begin : gen_alert_tie_off
|
||||||
|
// tie off if no alerts present in the system
|
||||||
|
assign alert_tx[k].alert_p = 1'b0;
|
||||||
|
assign alert_tx[k].alert_n = 1'b1;
|
||||||
|
end
|
||||||
|
% endif
|
||||||
|
|
||||||
|
## Inter-module Definitions
|
||||||
|
% if len(top["inter_signal"]["definitions"]) >= 1:
|
||||||
|
// define inter-module signals
|
||||||
|
% endif
|
||||||
|
% for sig in top["inter_signal"]["definitions"]:
|
||||||
|
${lib.im_defname(sig)} ${lib.bitarray(sig["width"],1)} ${sig["signame"]};
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
## Mixed connection to port
|
||||||
|
## Index greater than 0 means a port is assigned to an inter-module array
|
||||||
|
## whereas an index of 0 means a port is directly driven by a module
|
||||||
|
// define mixed connection to port
|
||||||
|
% for port in top['inter_signal']['external']:
|
||||||
|
% if port['conn_type'] and port['index'] > 0:
|
||||||
|
% if port['direction'] == 'in':
|
||||||
|
assign ${port['netname']}[${port['index']}] = ${port['signame']};
|
||||||
|
% else:
|
||||||
|
assign ${port['signame']} = ${port['netname']}[${port['index']}];
|
||||||
|
% endif
|
||||||
|
% elif port['conn_type']:
|
||||||
|
% if port['direction'] == 'in':
|
||||||
|
assign ${port['netname']} = ${port['signame']};
|
||||||
|
% else:
|
||||||
|
assign ${port['signame']} = ${port['netname']};
|
||||||
|
% endif
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
## Partial inter-module definition tie-off
|
||||||
|
// define partial inter-module tie-off
|
||||||
|
% for sig in unused_im_defs:
|
||||||
|
% for idx in range(sig['end_idx'], sig['width']):
|
||||||
|
${lib.im_defname(sig)} unused_${sig["signame"]}${idx};
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// assign partial inter-module tie-off
|
||||||
|
% for sig in unused_im_defs:
|
||||||
|
% for idx in range(sig['end_idx'], sig['width']):
|
||||||
|
assign unused_${sig["signame"]}${idx} = ${sig["signame"]}[${idx}];
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
% for sig in undriven_im_defs:
|
||||||
|
% for idx in range(sig['end_idx'], sig['width']):
|
||||||
|
assign ${sig["signame"]}[${idx}] = ${sig["default"]};
|
||||||
|
% endfor
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
## Inter-module signal collection
|
||||||
|
|
||||||
|
% for m in top["module"]:
|
||||||
|
% if m["type"] == "otp_ctrl":
|
||||||
|
// OTP HW_CFG Broadcast signals.
|
||||||
|
// TODO(#6713): The actual struct breakout and mapping currently needs to
|
||||||
|
// be performed by hand.
|
||||||
|
assign csrng_otp_en_csrng_sw_app_read = otp_ctrl_otp_hw_cfg.data.en_csrng_sw_app_read;
|
||||||
|
assign entropy_src_otp_en_entropy_src_fw_read = otp_ctrl_otp_hw_cfg.data.en_entropy_src_fw_read;
|
||||||
|
assign entropy_src_otp_en_entropy_src_fw_over = otp_ctrl_otp_hw_cfg.data.en_entropy_src_fw_over;
|
||||||
|
assign sram_ctrl_main_otp_en_sram_ifetch = otp_ctrl_otp_hw_cfg.data.en_sram_ifetch;
|
||||||
|
assign sram_ctrl_ret_aon_otp_en_sram_ifetch = otp_ctrl_otp_hw_cfg.data.en_sram_ifetch;
|
||||||
|
assign lc_ctrl_otp_device_id = otp_ctrl_otp_hw_cfg.data.device_id;
|
||||||
|
assign lc_ctrl_otp_manuf_state = otp_ctrl_otp_hw_cfg.data.manuf_state;
|
||||||
|
assign keymgr_otp_device_id = otp_ctrl_otp_hw_cfg.data.device_id;
|
||||||
|
|
||||||
|
logic unused_otp_hw_cfg_bits;
|
||||||
|
assign unused_otp_hw_cfg_bits = ^{
|
||||||
|
otp_ctrl_otp_hw_cfg.valid,
|
||||||
|
otp_ctrl_otp_hw_cfg.data.hw_cfg_digest,
|
||||||
|
otp_ctrl_otp_hw_cfg.data.unallocated
|
||||||
|
};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// Unused reset signals
|
||||||
|
% for k, v in unused_resets.items():
|
||||||
|
logic unused_d${v.lower()}_rst_${k};
|
||||||
|
% endfor
|
||||||
|
% for k, v in unused_resets.items():
|
||||||
|
assign unused_d${v.lower()}_rst_${k} = ${lib.get_reset_path(k, v, top)};
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// ibex specific assignments
|
||||||
|
// TODO: This should be further automated in the future.
|
||||||
|
assign rv_core_ibex_irq_timer = intr_rv_timer_timer_expired_0_0;
|
||||||
|
assign rv_core_ibex_hart_id = '0;
|
||||||
|
|
||||||
|
## Not all top levels have a rom controller.
|
||||||
|
## For those that do not, reference the ROM directly.
|
||||||
|
% if lib.is_rom_ctrl(top["module"]):
|
||||||
|
assign rv_core_ibex_boot_addr = ADDR_SPACE_ROM_CTRL__ROM;
|
||||||
|
% else:
|
||||||
|
assign rv_core_ibex_boot_addr = ADDR_SPACE_ROM;
|
||||||
|
% endif
|
||||||
|
|
||||||
|
// Struct breakout module tool-inserted DFT TAP signals
|
||||||
|
pinmux_jtag_breakout u_dft_tap_breakout (
|
||||||
|
.req_i (pinmux_aon_dft_jtag_req),
|
||||||
|
.rsp_o (pinmux_aon_dft_jtag_rsp),
|
||||||
|
.tck_o (),
|
||||||
|
.trst_no (),
|
||||||
|
.tms_o (),
|
||||||
|
.tdi_o (),
|
||||||
|
.tdo_i (1'b0),
|
||||||
|
.tdo_oe_i (1'b0)
|
||||||
|
);
|
||||||
|
|
||||||
|
## Memory Instantiation
|
||||||
|
% for m in top["memory"]:
|
||||||
|
<%
|
||||||
|
resets = m['reset_connections']
|
||||||
|
clocks = m['clock_connections']
|
||||||
|
%>\
|
||||||
|
% if m["type"] == "ram_1p_scr":
|
||||||
|
<%
|
||||||
|
data_width = int(top["datawidth"])
|
||||||
|
full_data_width = data_width + int(m["integ_width"])
|
||||||
|
dw_byte = data_width // 8
|
||||||
|
addr_width = ((int(m["size"], 0) // dw_byte) -1).bit_length()
|
||||||
|
sram_depth = (int(m["size"], 0) // dw_byte)
|
||||||
|
max_char = len(str(max(data_width, addr_width)))
|
||||||
|
%>\
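The size/width arithmetic in the block above is easy to sanity-check; the sketch below uses made-up values (a 64 KiB scrambled RAM with 7 integrity bits on a 32-bit bus), not numbers taken from any real top configuration.

# Sketch with made-up numbers: a 64 KiB RAM on a 32-bit bus with 7 integrity bits.
data_width = 32
integ_width = 7
size = int("0x10000", 0)                   # 64 KiB, as it would appear in the hjson

full_data_width = data_width + integ_width  # 39
dw_byte = data_width // 8                   # 4 bytes per word
sram_depth = size // dw_byte                # 16384 words
addr_width = (sram_depth - 1).bit_length()  # 14 address bits

print(full_data_width, sram_depth, addr_width)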
|
||||||
|
// sram device
|
||||||
|
logic ${lib.bitarray(1, max_char)} ${m["name"]}_req;
|
||||||
|
logic ${lib.bitarray(1, max_char)} ${m["name"]}_gnt;
|
||||||
|
logic ${lib.bitarray(1, max_char)} ${m["name"]}_we;
|
||||||
|
logic ${lib.bitarray(1, max_char)} ${m["name"]}_intg_err;
|
||||||
|
logic ${lib.bitarray(addr_width, max_char)} ${m["name"]}_addr;
|
||||||
|
logic ${lib.bitarray(full_data_width, max_char)} ${m["name"]}_wdata;
|
||||||
|
logic ${lib.bitarray(full_data_width, max_char)} ${m["name"]}_wmask;
|
||||||
|
logic ${lib.bitarray(full_data_width, max_char)} ${m["name"]}_rdata;
|
||||||
|
logic ${lib.bitarray(1, max_char)} ${m["name"]}_rvalid;
|
||||||
|
logic ${lib.bitarray(2, max_char)} ${m["name"]}_rerror;
|
||||||
|
|
||||||
|
tlul_adapter_sram #(
|
||||||
|
.SramAw(${addr_width}),
|
||||||
|
.SramDw(${data_width}),
|
||||||
|
.Outstanding(2),
|
||||||
|
.CmdIntgCheck(1),
|
||||||
|
.EnableRspIntgGen(1),
|
||||||
|
.EnableDataIntgGen(0),
|
||||||
|
.EnableDataIntgPt(1)
|
||||||
|
) u_tl_adapter_${m["name"]} (
|
||||||
|
% for key in clocks:
|
||||||
|
.${key} (${clocks[key]}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in resets.items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, m['domain'], top)}),
|
||||||
|
% endfor
|
||||||
|
.tl_i (${m["name"]}_tl_req),
|
||||||
|
.tl_o (${m["name"]}_tl_rsp),
|
||||||
|
.en_ifetch_i (${m["inter_signal_list"][3]["top_signame"]}),
|
||||||
|
.req_o (${m["name"]}_req),
|
||||||
|
.req_type_o (),
|
||||||
|
.gnt_i (${m["name"]}_gnt),
|
||||||
|
.we_o (${m["name"]}_we),
|
||||||
|
.addr_o (${m["name"]}_addr),
|
||||||
|
.wdata_o (${m["name"]}_wdata),
|
||||||
|
.wmask_o (${m["name"]}_wmask),
|
||||||
|
.intg_error_o(${m["name"]}_intg_err),
|
||||||
|
.rdata_i (${m["name"]}_rdata),
|
||||||
|
.rvalid_i (${m["name"]}_rvalid),
|
||||||
|
.rerror_i (${m["name"]}_rerror)
|
||||||
|
);
|
||||||
|
|
||||||
|
<%
|
||||||
|
mem_name = m["name"].split("_")
|
||||||
|
mem_name = lib.Name(mem_name[1:])
|
||||||
|
%>\
|
||||||
|
prim_ram_1p_scr #(
|
||||||
|
.Width(${full_data_width}),
|
||||||
|
.Depth(${sram_depth}),
|
||||||
|
.EnableParity(0),
|
||||||
|
.LfsrWidth(${data_width}),
|
||||||
|
.StatePerm(RndCnstSramCtrl${mem_name.as_camel_case()}SramLfsrPerm),
|
||||||
|
.DataBitsPerMask(1), // TODO: Temporary change to ensure byte updates can still be done
|
||||||
|
.DiffWidth(8)
|
||||||
|
) u_ram1p_${m["name"]} (
|
||||||
|
% for key in clocks:
|
||||||
|
.${key} (${clocks[key]}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in resets.items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, m['domain'], top)}),
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
.key_valid_i (${m["inter_signal_list"][1]["top_signame"]}_req.valid),
|
||||||
|
.key_i (${m["inter_signal_list"][1]["top_signame"]}_req.key),
|
||||||
|
.nonce_i (${m["inter_signal_list"][1]["top_signame"]}_req.nonce),
|
||||||
|
.init_req_i (${m["inter_signal_list"][2]["top_signame"]}_req.req),
|
||||||
|
.init_seed_i (${m["inter_signal_list"][2]["top_signame"]}_req.seed),
|
||||||
|
.init_ack_o (${m["inter_signal_list"][2]["top_signame"]}_rsp.ack),
|
||||||
|
|
||||||
|
.req_i (${m["name"]}_req),
|
||||||
|
.intg_error_i(${m["name"]}_intg_err),
|
||||||
|
.gnt_o (${m["name"]}_gnt),
|
||||||
|
.write_i (${m["name"]}_we),
|
||||||
|
.addr_i (${m["name"]}_addr),
|
||||||
|
.wdata_i (${m["name"]}_wdata),
|
||||||
|
.wmask_i (${m["name"]}_wmask),
|
||||||
|
.rdata_o (${m["name"]}_rdata),
|
||||||
|
.rvalid_o (${m["name"]}_rvalid),
|
||||||
|
.rerror_o (${m["name"]}_rerror),
|
||||||
|
.raddr_o (${m["inter_signal_list"][1]["top_signame"]}_rsp.raddr),
|
||||||
|
.intg_error_o(${m["inter_signal_list"][4]["top_signame"]}),
|
||||||
|
.cfg_i (ram_1p_cfg_i)
|
||||||
|
);
|
||||||
|
|
||||||
|
assign ${m["inter_signal_list"][1]["top_signame"]}_rsp.rerror = ${m["name"]}_rerror;
|
||||||
|
|
||||||
|
% elif m["type"] == "rom":
|
||||||
|
<%
|
||||||
|
data_width = int(top["datawidth"])
|
||||||
|
full_data_width = data_width + int(m['integ_width'])
|
||||||
|
dw_byte = data_width // 8
|
||||||
|
addr_width = ((int(m["size"], 0) // dw_byte) -1).bit_length()
|
||||||
|
rom_depth = (int(m["size"], 0) // dw_byte)
|
||||||
|
max_char = len(str(max(data_width, addr_width)))
|
||||||
|
%>\
|
||||||
|
// ROM device
|
||||||
|
logic ${lib.bitarray(1, max_char)} ${m["name"]}_req;
|
||||||
|
logic ${lib.bitarray(addr_width, max_char)} ${m["name"]}_addr;
|
||||||
|
logic ${lib.bitarray(full_data_width, max_char)} ${m["name"]}_rdata;
|
||||||
|
logic ${lib.bitarray(1, max_char)} ${m["name"]}_rvalid;
|
||||||
|
|
||||||
|
tlul_adapter_sram #(
|
||||||
|
.SramAw(${addr_width}),
|
||||||
|
.SramDw(${data_width}),
|
||||||
|
.Outstanding(2),
|
||||||
|
.ErrOnWrite(1),
|
||||||
|
.CmdIntgCheck(1),
|
||||||
|
.EnableRspIntgGen(1),
|
||||||
|
.EnableDataIntgGen(0)
|
||||||
|
) u_tl_adapter_${m["name"]} (
|
||||||
|
% for key in clocks:
|
||||||
|
.${key} (${clocks[key]}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in resets.items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, m['domain'], top)}),
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
.tl_i (${m["name"]}_tl_req),
|
||||||
|
.tl_o (${m["name"]}_tl_rsp),
|
||||||
|
.en_ifetch_i (tlul_pkg::InstrEn),
|
||||||
|
.req_o (${m["name"]}_req),
|
||||||
|
.req_type_o (),
|
||||||
|
.gnt_i (1'b1), // Always grant as only one requester exists
|
||||||
|
.we_o (),
|
||||||
|
.addr_o (${m["name"]}_addr),
|
||||||
|
.wdata_o (),
|
||||||
|
.wmask_o (),
|
||||||
|
.intg_error_o(), // Connect to ROM checker and ROM scramble later
|
||||||
|
.rdata_i (${m["name"]}_rdata[${data_width-1}:0]),
|
||||||
|
.rvalid_i (${m["name"]}_rvalid),
|
||||||
|
.rerror_i (2'b00)
|
||||||
|
);
|
||||||
|
|
||||||
|
prim_rom_adv #(
|
||||||
|
.Width(${full_data_width}),
|
||||||
|
.Depth(${rom_depth}),
|
||||||
|
.MemInitFile(BootRomInitFile)
|
||||||
|
) u_rom_${m["name"]} (
|
||||||
|
% for key in clocks:
|
||||||
|
.${key} (${clocks[key]}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in resets.items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, m['domain'], top)}),
|
||||||
|
% endfor
|
||||||
|
.req_i (${m["name"]}_req),
|
||||||
|
.addr_i (${m["name"]}_addr),
|
||||||
|
.rdata_o (${m["name"]}_rdata),
|
||||||
|
.rvalid_o (${m["name"]}_rvalid),
|
||||||
|
.cfg_i (rom_cfg_i)
|
||||||
|
);
|
||||||
|
|
||||||
|
% elif m["type"] == "eflash":
|
||||||
|
|
||||||
|
// host to flash communication
|
||||||
|
logic flash_host_req;
|
||||||
|
tlul_pkg::tl_type_e flash_host_req_type;
|
||||||
|
logic flash_host_req_rdy;
|
||||||
|
logic flash_host_req_done;
|
||||||
|
logic flash_host_rderr;
|
||||||
|
logic [flash_ctrl_pkg::BusWidth-1:0] flash_host_rdata;
|
||||||
|
logic [flash_ctrl_pkg::BusAddrW-1:0] flash_host_addr;
|
||||||
|
logic flash_host_intg_err;
|
||||||
|
|
||||||
|
tlul_adapter_sram #(
|
||||||
|
.SramAw(flash_ctrl_pkg::BusAddrW),
|
||||||
|
.SramDw(flash_ctrl_pkg::BusWidth),
|
||||||
|
.Outstanding(2),
|
||||||
|
.ByteAccess(0),
|
||||||
|
.ErrOnWrite(1),
|
||||||
|
.CmdIntgCheck(1),
|
||||||
|
.EnableRspIntgGen(1),
|
||||||
|
.EnableDataIntgGen(1)
|
||||||
|
) u_tl_adapter_${m["name"]} (
|
||||||
|
% for key in clocks:
|
||||||
|
.${key} (${clocks[key]}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in resets.items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, m['domain'], top)}),
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
.tl_i (${m["name"]}_tl_req),
|
||||||
|
.tl_o (${m["name"]}_tl_rsp),
|
||||||
|
.en_ifetch_i (tlul_pkg::InstrEn), // tie this to secure boot somehow
|
||||||
|
.req_o (flash_host_req),
|
||||||
|
.req_type_o (flash_host_req_type),
|
||||||
|
.gnt_i (flash_host_req_rdy),
|
||||||
|
.we_o (),
|
||||||
|
.addr_o (flash_host_addr),
|
||||||
|
.wdata_o (),
|
||||||
|
.wmask_o (),
|
||||||
|
.intg_error_o(flash_host_intg_err),
|
||||||
|
.rdata_i (flash_host_rdata),
|
||||||
|
.rvalid_i (flash_host_req_done),
|
||||||
|
.rerror_i ({flash_host_rderr,1'b0})
|
||||||
|
);
|
||||||
|
|
||||||
|
flash_phy u_flash_${m["name"]} (
|
||||||
|
% for key in clocks:
|
||||||
|
.${key} (${clocks[key]}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in resets.items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, m['domain'], top)}),
|
||||||
|
% endfor
|
||||||
|
.host_req_i (flash_host_req),
|
||||||
|
.host_intg_err_i (flash_host_intg_err),
|
||||||
|
.host_req_type_i (flash_host_req_type),
|
||||||
|
.host_addr_i (flash_host_addr),
|
||||||
|
.host_req_rdy_o (flash_host_req_rdy),
|
||||||
|
.host_req_done_o (flash_host_req_done),
|
||||||
|
.host_rderr_o (flash_host_rderr),
|
||||||
|
.host_rdata_o (flash_host_rdata),
|
||||||
|
.flash_ctrl_i (${m["inter_signal_list"][0]["top_signame"]}_req),
|
||||||
|
.flash_ctrl_o (${m["inter_signal_list"][0]["top_signame"]}_rsp),
|
||||||
|
.lc_nvm_debug_en_i (${m["inter_signal_list"][2]["top_signame"]}),
|
||||||
|
.flash_bist_enable_i,
|
||||||
|
.flash_power_down_h_i,
|
||||||
|
.flash_power_ready_h_i,
|
||||||
|
.flash_test_mode_a_io,
|
||||||
|
.flash_test_voltage_h_io,
|
||||||
|
.flash_alert_o,
|
||||||
|
.scanmode_i,
|
||||||
|
.scan_en_i,
|
||||||
|
.scan_rst_ni
|
||||||
|
);
|
||||||
|
|
||||||
|
% else:
|
||||||
|
// flash memory is embedded within controller
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
## Peripheral Instantiation
|
||||||
|
|
||||||
|
<% alert_idx = 0 %>
|
||||||
|
% for m in top["module"]:
|
||||||
|
<%
|
||||||
|
if not lib.is_inst(m):
|
||||||
|
continue
|
||||||
|
|
||||||
|
block = name_to_block[m['type']]
|
||||||
|
inouts, inputs, outputs = block.xputs
|
||||||
|
|
||||||
|
port_list = inputs + outputs + inouts
|
||||||
|
max_sigwidth = max(len(x.name) for x in port_list) if port_list else 0
|
||||||
|
max_intrwidth = (max(len(x.name) for x in block.interrupts)
|
||||||
|
if block.interrupts else 0)
|
||||||
|
%>\
|
||||||
|
% if m["param_list"] or block.alerts:
|
||||||
|
${m["type"]} #(
|
||||||
|
% if block.alerts:
|
||||||
|
<%
|
||||||
|
w = len(block.alerts)
|
||||||
|
slice = str(alert_idx+w-1) + ":" + str(alert_idx)
|
||||||
|
%>\
|
||||||
|
.AlertAsyncOn(alert_handler_reg_pkg::AsyncOn[${slice}])${"," if m["param_list"] else ""}
|
||||||
|
% endif
|
||||||
|
% for i in m["param_list"]:
|
||||||
|
.${i["name"]}(${i["name_top" if i.get("expose") == "true" or i.get("randtype", "none") != "none" else "default"]})${"," if not loop.last else ""}
|
||||||
|
% endfor
|
||||||
|
) u_${m["name"]} (
|
||||||
|
% else:
|
||||||
|
${m["type"]} u_${m["name"]} (
|
||||||
|
% endif
|
||||||
|
% for p_in in inputs + inouts:
|
||||||
|
% if loop.first:
|
||||||
|
|
||||||
|
// Input
|
||||||
|
% endif
|
||||||
|
.${lib.ljust("cio_"+p_in.name+"_i",max_sigwidth+9)} (cio_${m["name"]}_${p_in.name}_p2d),
|
||||||
|
% endfor
|
||||||
|
% for p_out in outputs + inouts:
|
||||||
|
% if loop.first:
|
||||||
|
|
||||||
|
// Output
|
||||||
|
% endif
|
||||||
|
.${lib.ljust("cio_"+p_out.name+"_o", max_sigwidth+9)} (cio_${m["name"]}_${p_out.name}_d2p),
|
||||||
|
.${lib.ljust("cio_"+p_out.name+"_en_o",max_sigwidth+9)} (cio_${m["name"]}_${p_out.name}_en_d2p),
|
||||||
|
% endfor
|
||||||
|
% for intr in block.interrupts:
|
||||||
|
% if loop.first:
|
||||||
|
|
||||||
|
// Interrupt
|
||||||
|
% endif
|
||||||
|
.${lib.ljust("intr_"+intr.name+"_o",max_intrwidth+7)} (intr_${m["name"]}_${intr.name}),
|
||||||
|
% endfor
|
||||||
|
% if block.alerts:
|
||||||
|
% for alert in block.alerts:
|
||||||
|
// [${alert_idx}]: ${alert.name}<% alert_idx += 1 %>
|
||||||
|
% endfor
|
||||||
|
.alert_tx_o ( alert_tx[${slice}] ),
|
||||||
|
.alert_rx_i ( alert_rx[${slice}] ),
|
||||||
|
% endif
|
||||||
|
## TODO: Inter-module Connection
|
||||||
|
% if m.get('inter_signal_list'):
|
||||||
|
|
||||||
|
// Inter-module signals
|
||||||
|
% for sig in m['inter_signal_list']:
|
||||||
|
## TODO: handle below condition in lib.py
|
||||||
|
% if sig['type'] == "req_rsp":
|
||||||
|
.${lib.im_portname(sig,"req")}(${lib.im_netname(sig, "req")}),
|
||||||
|
.${lib.im_portname(sig,"rsp")}(${lib.im_netname(sig, "rsp")}),
|
||||||
|
% elif sig['type'] == "uni":
|
||||||
|
## TODO: Broadcast type
|
||||||
|
## TODO: default for logic type
|
||||||
|
.${lib.im_portname(sig)}(${lib.im_netname(sig)}),
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
% endif
|
||||||
|
% if m["type"] == "rv_plic":
|
||||||
|
.intr_src_i (intr_vector),
|
||||||
|
% endif
|
||||||
|
% if m["type"] == "pinmux":
|
||||||
|
|
||||||
|
.periph_to_mio_i (mio_d2p ),
|
||||||
|
.periph_to_mio_oe_i (mio_en_d2p ),
|
||||||
|
.mio_to_periph_o (mio_p2d ),
|
||||||
|
|
||||||
|
.mio_attr_o,
|
||||||
|
.mio_out_o,
|
||||||
|
.mio_oe_o,
|
||||||
|
.mio_in_i,
|
||||||
|
|
||||||
|
.periph_to_dio_i (dio_d2p ),
|
||||||
|
.periph_to_dio_oe_i (dio_en_d2p ),
|
||||||
|
.dio_to_periph_o (dio_p2d ),
|
||||||
|
|
||||||
|
.dio_attr_o,
|
||||||
|
.dio_out_o,
|
||||||
|
.dio_oe_o,
|
||||||
|
.dio_in_i,
|
||||||
|
|
||||||
|
% endif
|
||||||
|
% if m["type"] == "alert_handler":
|
||||||
|
// alert signals
|
||||||
|
.alert_rx_o ( alert_rx ),
|
||||||
|
.alert_tx_i ( alert_tx ),
|
||||||
|
% endif
|
||||||
|
% if m["type"] == "otp_ctrl":
|
||||||
|
.otp_ext_voltage_h_io,
|
||||||
|
% endif
|
||||||
|
% if block.scan:
|
||||||
|
.scanmode_i,
|
||||||
|
% endif
|
||||||
|
% if block.scan_reset:
|
||||||
|
.scan_rst_ni,
|
||||||
|
% endif
|
||||||
|
% if block.scan_en:
|
||||||
|
.scan_en_i,
|
||||||
|
% endif
|
||||||
|
|
||||||
|
// Clock and reset connections
|
||||||
|
% for k, v in m["clock_connections"].items():
|
||||||
|
.${k} (${v}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in m["reset_connections"].items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, m['domain'], top)})${"," if not loop.last else ""}
|
||||||
|
% endfor
|
||||||
|
);
|
||||||
|
|
||||||
|
% endfor
|
||||||
|
// interrupt assignments
|
||||||
|
<% base = interrupt_num %>\
|
||||||
|
assign intr_vector = {
|
||||||
|
% for intr in top["interrupt"][::-1]:
|
||||||
|
<% base -= intr["width"] %>\
|
||||||
|
intr_${intr["name"]}, // IDs [${base} +: ${intr['width']}]
|
||||||
|
% endfor
|
||||||
|
1'b 0 // ID [0 +: 1] is a special case and tied to zero.
|
||||||
|
};
|
||||||
|
|
||||||
|
// TL-UL Crossbar
|
||||||
|
% for xbar in top["xbar"]:
|
||||||
|
<%
|
||||||
|
name_len = max([len(x["name"]) for x in xbar["nodes"]]);
|
||||||
|
%>\
|
||||||
|
xbar_${xbar["name"]} u_xbar_${xbar["name"]} (
|
||||||
|
% for k, v in xbar["clock_connections"].items():
|
||||||
|
.${k} (${v}),
|
||||||
|
% endfor
|
||||||
|
% for port, reset in xbar["reset_connections"].items():
|
||||||
|
.${port} (${lib.get_reset_path(reset, xbar["domain"], top)}),
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
## Inter-module signal
|
||||||
|
% for sig in xbar["inter_signal_list"]:
|
||||||
|
<% assert sig['type'] == "req_rsp" %>\
|
||||||
|
// port: ${sig['name']}
|
||||||
|
.${lib.im_portname(sig,"req")}(${lib.im_netname(sig, "req")}),
|
||||||
|
.${lib.im_portname(sig,"rsp")}(${lib.im_netname(sig, "rsp")}),
|
||||||
|
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
.scanmode_i
|
||||||
|
);
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% if "pinmux" in top:
|
||||||
|
// Pinmux connections
|
||||||
|
// All muxed inputs
|
||||||
|
% for sig in top["pinmux"]["ios"]:
|
||||||
|
% if sig["connection"] == "muxed" and sig["type"] in ["inout", "input"]:
|
||||||
|
<% literal = lib.get_io_enum_literal(sig, 'mio_in') %>\
|
||||||
|
assign cio_${sig["name"]}_p2d${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""} = mio_p2d[${literal}];
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// All muxed outputs
|
||||||
|
% for sig in top["pinmux"]["ios"]:
|
||||||
|
% if sig["connection"] == "muxed" and sig["type"] in ["inout", "output"]:
|
||||||
|
<% literal = lib.get_io_enum_literal(sig, 'mio_out') %>\
|
||||||
|
assign mio_d2p[${literal}] = cio_${sig["name"]}_d2p${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// All muxed output enables
|
||||||
|
% for sig in top["pinmux"]["ios"]:
|
||||||
|
% if sig["connection"] == "muxed" and sig["type"] in ["inout", "output"]:
|
||||||
|
<% literal = lib.get_io_enum_literal(sig, 'mio_out') %>\
|
||||||
|
assign mio_en_d2p[${literal}] = cio_${sig["name"]}_en_d2p${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// All dedicated inputs
|
||||||
|
<% idx = 0 %>\
|
||||||
|
logic [${num_dio_total-1}:0] unused_dio_p2d;
|
||||||
|
assign unused_dio_p2d = dio_p2d;
|
||||||
|
% for sig in top["pinmux"]["ios"]:
|
||||||
|
<% literal = lib.get_io_enum_literal(sig, 'dio') %>\
|
||||||
|
% if sig["connection"] != "muxed" and sig["type"] in ["inout"]:
|
||||||
|
assign cio_${sig["name"]}_p2d${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""} = dio_p2d[${literal}];
|
||||||
|
% elif sig["connection"] != "muxed" and sig["type"] in ["input"]:
|
||||||
|
assign cio_${sig["name"]}_p2d${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""} = dio_p2d[${literal}];
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// All dedicated outputs
|
||||||
|
% for sig in top["pinmux"]["ios"]:
|
||||||
|
<% literal = lib.get_io_enum_literal(sig, 'dio') %>\
|
||||||
|
% if sig["connection"] != "muxed" and sig["type"] in ["inout"]:
|
||||||
|
assign dio_d2p[${literal}] = cio_${sig["name"]}_d2p${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""};
|
||||||
|
% elif sig["connection"] != "muxed" and sig["type"] in ["input"]:
|
||||||
|
assign dio_d2p[${literal}] = 1'b0;
|
||||||
|
% elif sig["connection"] != "muxed" and sig["type"] in ["output"]:
|
||||||
|
assign dio_d2p[${literal}] = cio_${sig["name"]}_d2p${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
// All dedicated output enables
|
||||||
|
% for sig in top["pinmux"]["ios"]:
|
||||||
|
<% literal = lib.get_io_enum_literal(sig, 'dio') %>\
|
||||||
|
% if sig["connection"] != "muxed" and sig["type"] in ["inout"]:
|
||||||
|
assign dio_en_d2p[${literal}] = cio_${sig["name"]}_en_d2p${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""};
|
||||||
|
% elif sig["connection"] != "muxed" and sig["type"] in ["input"]:
|
||||||
|
assign dio_en_d2p[${literal}] = 1'b0;
|
||||||
|
% elif sig["connection"] != "muxed" and sig["type"] in ["output"]:
|
||||||
|
assign dio_en_d2p[${literal}] = cio_${sig["name"]}_en_d2p${"[" + str(sig["idx"]) +"]" if sig["idx"] !=-1 else ""};
|
||||||
|
% endif
|
||||||
|
% endfor
|
||||||
|
|
||||||
|
% endif
|
||||||
|
|
||||||
|
// make sure scanmode_i is never X (including during reset)
|
||||||
|
`ASSERT_KNOWN(scanmodeKnown, scanmode_i, clk_main_i, 0)
|
||||||
|
|
||||||
|
endmodule
@@ -0,0 +1,70 @@
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0

#ifndef _TOP_${top["name"].upper()}_MEMORY_H_
#define _TOP_${top["name"].upper()}_MEMORY_H_

/**
 * @file
 * @brief Assembler-only Top-Specific Definitions.
 *
 * This file contains preprocessor definitions for use within assembly code.
 *
 * These are not shared with C/C++ code because these are only allowed to be
 * preprocessor definitions, no data or type declarations are allowed. The
 * assembler is also stricter about literals (not allowing suffixes for
 * signed/unsigned which are sensible to use for unsigned values in C/C++).
 */

// Include guard for assembler
#ifdef __ASSEMBLER__


% for m in top["module"]:
% if "memory" in m:
% for key, val in m["memory"].items():
/**
 * Memory base for ${m["name"]}_${val["label"]} in top ${top["name"]}.
 */
#define TOP_${top["name"].upper()}_${val["label"].upper()}_BASE_ADDR ${m["base_addrs"][key]}

/**
 * Memory size for ${m["name"]}_${val["label"]} in top ${top["name"]}.
 */
#define TOP_${top["name"].upper()}_${val["label"].upper()}_SIZE_BYTES ${val["size"]}

% endfor
% endif
% endfor

% for m in top["memory"]:
/**
 * Memory base address for ${m["name"]} in top ${top["name"]}.
 */
#define TOP_${top["name"].upper()}_${m["name"].upper()}_BASE_ADDR ${m["base_addr"]}

/**
 * Memory size for ${m["name"]} in top ${top["name"]}.
 */
#define TOP_${top["name"].upper()}_${m["name"].upper()}_SIZE_BYTES ${m["size"]}

% endfor

% for (inst_name, if_name), region in helper.devices():
<%
  if_desc = inst_name if if_name is None else '{} device on {}'.format(if_name, inst_name)
  hex_base_addr = "0x{:X}".format(region.base_addr)
  base_addr_name = region.base_addr_name().as_c_define()
%>\
/**
 * Peripheral base address for ${if_desc} in top ${top["name"]}.
 *
 * This should be used with #mmio_region_from_addr to access the memory-mapped
 * registers associated with the peripheral (usually via a DIF).
 */
#define ${base_addr_name} ${hex_base_addr}
% endfor
#endif  // __ASSEMBLER__

#endif  // _TOP_${top["name"].upper()}_MEMORY_H_
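To see what the memory loops above expand to, the inner loop can be emulated directly; the module name, label and addresses below are invented for illustration.

# Sketch: emulate the first loop of the template with an invented module entry.
top_example = {"name": "earlgrey"}
m = {
    "name": "sram_ctrl_main",
    "memory": {"ram": {"label": "ram_main", "size": "0x20000"}},
    "base_addrs": {"ram": "0x10000000"},
}

for key, val in m["memory"].items():
    print("#define TOP_{}_{}_BASE_ADDR {}".format(
        top_example["name"].upper(), val["label"].upper(), m["base_addrs"][key]))
    print("#define TOP_{}_{}_SIZE_BYTES {}".format(
        top_example["name"].upper(), val["label"].upper(), val["size"]))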
@@ -0,0 +1,39 @@
/* Copyright lowRISC contributors. */
/* Licensed under the Apache License, Version 2.0, see LICENSE for details. */
/* SPDX-License-Identifier: Apache-2.0 */
<%!
def memory_to_flags(memory):
  memory_type = memory["type"]
  memory_access = memory.get("swaccess", "rw")
  assert memory_access in ["ro", "rw"]

  flags_str = ""
  if memory_access == "ro":
    flags_str += "r"
  else:
    flags_str += "rw"

  if memory_type in ["rom", "eflash"]:
    flags_str += "x"

  return flags_str
%>\
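memory_to_flags turns a memory's type and software access into the attribute string used in the MEMORY entries further down. A condensed, standalone copy of the helper (illustrative only) behaves like this:

# Condensed standalone copy of the template helper, for illustration.
def memory_to_flags(memory):
    memory_access = memory.get("swaccess", "rw")
    assert memory_access in ["ro", "rw"]
    flags_str = "r" if memory_access == "ro" else "rw"
    if memory["type"] in ["rom", "eflash"]:
        flags_str += "x"
    return flags_str

print(memory_to_flags({"type": "rom", "swaccess": "ro"}))        # rx
print(memory_to_flags({"type": "ram_1p_scr", "swaccess": "rw"})) # rw
print(memory_to_flags({"type": "eflash", "swaccess": "rw"}))     # rwx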

/**
 * Partial linker script for chip memory configuration.
 * eflash virtual is a fixed address that does not physically exist but is used as the
 * translation base
 */
MEMORY {
% for m in top["module"]:
% if "memory" in m:
% for key, val in m["memory"].items():
  ${val["label"]}(${val["swaccess"]}) : ORIGIN = ${m["base_addrs"][key]}, LENGTH = ${val["size"]}
% endfor
% endif
% endfor
% for m in top["memory"]:
  ${m["name"]}(${memory_to_flags(m)}) : ORIGIN = ${m["base_addr"]}, LENGTH = ${m["size"]}
% endfor
  eflash_virtual(rx) : ORIGIN = 0x80000000, LENGTH = 0x100000
}
@@ -0,0 +1,112 @@
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
${gencmd}
<%
import topgen.lib as lib
%>\
package top_${top["name"]}_pkg;
% for (inst_name, if_name), region in helper.devices():
<%
  if_desc = inst_name if if_name is None else '{} device on {}'.format(if_name, inst_name)
  hex_base_addr = "32'h{:X}".format(region.base_addr)
  hex_size_bytes = "32'h{:X}".format(region.size_bytes)
%>\
  /**
   * Peripheral base address for ${if_desc} in top ${top["name"]}.
   */
  parameter int unsigned ${region.base_addr_name().as_c_define()} = ${hex_base_addr};

  /**
   * Peripheral size in bytes for ${if_desc} in top ${top["name"]}.
   */
  parameter int unsigned ${region.size_bytes_name().as_c_define()} = ${hex_size_bytes};

% endfor
% for name, region in helper.memories():
<%
  hex_base_addr = "32'h{:x}".format(region.base_addr)
  hex_size_bytes = "32'h{:x}".format(region.size_bytes)
%>\
  /**
   * Memory base address for ${name} in top ${top["name"]}.
   */
  parameter int unsigned ${region.base_addr_name().as_c_define()} = ${hex_base_addr};

  /**
   * Memory size for ${name} in top ${top["name"]}.
   */
  parameter int unsigned ${region.size_bytes_name().as_c_define()} = ${hex_size_bytes};

% endfor

  // Enumeration of IO power domains.
  // Only used in ASIC target.
  typedef enum logic [${len(top["pinout"]["banks"]).bit_length()-1}:0] {
% for bank in top["pinout"]["banks"]:
    ${lib.Name(['io', 'bank', bank]).as_camel_case()} = ${loop.index},
% endfor
    IoBankCount = ${len(top["pinout"]["banks"])}
  } pwr_dom_e;
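The enum width above is derived from the number of IO banks via bit_length(); for example, with four hypothetical banks:

# Sketch: the pwr_dom_e width for a made-up list of four IO banks.
banks = ["vcc", "avcc", "vioa", "viob"]
msb = len(banks).bit_length() - 1   # len == 4 -> bit_length() == 3 -> logic [2:0]
print("typedef enum logic [{}:0]".format(msb))
print("IoBankCount =", len(banks))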

  // Enumeration for MIO signals on the top-level.
  typedef enum int unsigned {
% for sig in top["pinmux"]["ios"]:
% if sig['type'] in ['inout', 'input'] and sig['connection'] == 'muxed':
    ${lib.get_io_enum_literal(sig, 'mio_in')} = ${sig['glob_idx']},
% endif
% endfor
<% total = top["pinmux"]['io_counts']['muxed']['inouts'] + \
           top["pinmux"]['io_counts']['muxed']['inputs'] %>\
    ${lib.Name.from_snake_case("mio_in_count").as_camel_case()} = ${total}
  } mio_in_e;

  typedef enum {
% for sig in top["pinmux"]["ios"]:
% if sig['type'] in ['inout', 'output'] and sig['connection'] == 'muxed':
    ${lib.get_io_enum_literal(sig, 'mio_out')} = ${sig['glob_idx']},
% endif
% endfor
<% total = top["pinmux"]['io_counts']['muxed']['inouts'] + \
           top["pinmux"]['io_counts']['muxed']['outputs'] %>\
    ${lib.Name.from_snake_case("mio_out_count").as_camel_case()} = ${total}
  } mio_out_e;

  // Enumeration for DIO signals, used on both the top and chip-levels.
  typedef enum int unsigned {
% for sig in top["pinmux"]["ios"]:
% if sig['connection'] != 'muxed':
    ${lib.get_io_enum_literal(sig, 'dio')} = ${sig['glob_idx']},
% endif
% endfor
<% total = top["pinmux"]['io_counts']['dedicated']['inouts'] + \
           top["pinmux"]['io_counts']['dedicated']['inputs'] + \
           top["pinmux"]['io_counts']['dedicated']['outputs'] %>\
    ${lib.Name.from_snake_case("dio_count").as_camel_case()} = ${total}
  } dio_e;

  // Raw MIO/DIO input array indices on chip-level.
  // TODO: Does not account for target specific stubbed/added pads.
  // Need to make a target-specific package for those.
  typedef enum int unsigned {
% for pad in top["pinout"]["pads"]:
% if pad["connection"] == "muxed":
    ${lib.Name.from_snake_case("mio_pad_" + pad["name"]).as_camel_case()} = ${pad["idx"]},
% endif
% endfor
    ${lib.Name.from_snake_case("mio_pad_count").as_camel_case()}
  } mio_pad_e;

  typedef enum int unsigned {
% for pad in top["pinout"]["pads"]:
% if pad["connection"] != "muxed":
    ${lib.Name.from_snake_case("dio_pad_" + pad["name"]).as_camel_case()} = ${pad["idx"]},
% endif
% endfor
    ${lib.Name.from_snake_case("dio_pad_count").as_camel_case()}
  } dio_pad_e;

  // TODO: Enumeration for PLIC Interrupt source peripheral.
  // TODO: Enumeration for PLIC Interrupt Ids.

endpackage
@@ -0,0 +1,48 @@
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
${gencmd}
<%
def make_blocked_sv_literal(hexstr, randwidth):
  """Chop a hex string into blocks of <= 64 digits"""
  # Convert hexstr to an actual number
  num = int(hexstr, 16)
  assert 0 <= num < (1 << randwidth)

  mask = (1 << 256) - 1

  bits_left = randwidth
  acc = []
  while bits_left > 0:
    word = num & mask
    width = min(256, bits_left)

    acc.append("{nbits}'h{word:0{num_nibbles}X}"
               .format(word=word,
                       nbits=width,
                       num_nibbles=(width + 3) // 4))
    bits_left -= width
    num >>= width

  acc.reverse()
  return acc
%>
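make_blocked_sv_literal splits a wide random constant into SystemVerilog literals of at most 256 bits. A condensed re-implementation (illustrative only, with a made-up 260-bit constant) shows the blocking:

# Condensed re-implementation of make_blocked_sv_literal, for illustration only.
def blocked_sv_literal(hexstr, randwidth):
    num = int(hexstr, 16)
    acc = []
    bits_left = randwidth
    while bits_left > 0:
        width = min(256, bits_left)
        word = num & ((1 << 256) - 1)
        acc.append("{}'h{:0{}X}".format(width, word, (width + 3) // 4))
        num >>= width
        bits_left -= width
    acc.reverse()
    return acc

# A made-up 260-bit constant: 0xF in the top nibble, 1 in the bottom bit.
blocks = blocked_sv_literal("F" + "0" * 63 + "1", 260)
print(blocks[0])       # 4'hF
print(blocks[1][:8])   # 256'h000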
package top_${top["name"]}_rnd_cnst_pkg;

% for m in top["module"]:
% for p in filter(lambda p: p.get("randtype") in ["data", "perm"], m["param_list"]):
% if loop.first:
  ////////////////////////////////////////////
  // ${m['name']}
  ////////////////////////////////////////////
% endif
  // ${p['desc']}
  parameter ${p["type"]} ${p["name_top"]} = {
% for block in make_blocked_sv_literal(p["default"], p["randwidth"]):
    ${block}${"" if loop.last else ","}
% endfor
  };

% endfor
% endfor
endpackage : top_${top["name"]}_rnd_cnst_pkg
@@ -0,0 +1,88 @@
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//
// xbar_env_pkg__params generated by `topgen.py` tool

<%
from collections import OrderedDict

def is_device_a_xbar(dev_name):
  for xbar in top["xbar"]:
    if xbar["name"] == dev_name:
      return 1
  return 0

# recursively find all non-xbar devices under this xbar
def get_xbar_edge_nodes(xbar_name):
  edge_devices = []
  for xbar in top["xbar"]:
    if xbar["name"] == xbar_name:
      for host, devices in xbar["connections"].items():
        for dev_name in devices:
          if is_device_a_xbar(dev_name):
            edge_devices.extend(get_xbar_edge_nodes(dev_name))
          else:
            edge_devices.append(dev_name)

  return edge_devices

# find device xbar and assign all its device nodes to it: "peri" -> "uart, gpio, ..."
xbar_device_dict = OrderedDict()

for xbar in top["xbar"]:
  for n in xbar["nodes"]:
    if n["type"] == "device" and n["xbar"]:
      xbar_device_dict[n["name"]] = get_xbar_edge_nodes(n["name"])

# create the mapping: host with the corresponding devices map
host_dev_map = OrderedDict()
for host, devices in top["xbar"][0]["connections"].items():
  dev_list = []
  for dev in devices:
    if dev not in xbar_device_dict.keys():
      dev_list.append(dev)
    else:
      dev_list.extend(xbar_device_dict[dev])
  host_dev_map[host] = dev_list

%>\
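The preamble above flattens nested crossbars so that each host is mapped to edge devices only. The sketch below applies the same idea to a made-up two-crossbar topology; the names are not taken from any real top.

# Sketch: flatten an invented two-crossbar topology into a host -> edge-device map,
# mirroring what the preamble above does with top["xbar"].
connections = {
    "main": {"corei": ["rom", "peri"], "cored": ["rom", "ram_main", "peri"]},
    "peri": {"main": ["uart", "gpio", "spi_device"]},
}

def edge_nodes(xbar_name):
    # Recursively collect devices under xbar_name that are not crossbars themselves.
    out = []
    for devices in connections[xbar_name].values():
        for dev in devices:
            out.extend(edge_nodes(dev) if dev in connections else [dev])
    return out

host_dev_map = {}
for host, devices in connections["main"].items():
    dev_list = []
    for dev in devices:
        dev_list.extend(edge_nodes(dev) if dev in connections else [dev])
    host_dev_map[host] = dev_list

print(host_dev_map["corei"])  # ['rom', 'uart', 'gpio', 'spi_device']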

// List of Xbar device memory map
tl_device_t xbar_devices[$] = '{
% for xbar in top["xbar"]:
% for device in xbar["nodes"]:
% if device["type"] == "device" and not device["xbar"]:
    '{"${device["name"].replace('.', '__')}", '{
% for addr in device["addr_range"]:
<%
  start_addr = int(addr["base_addr"], 0)
  end_addr = start_addr + int(addr["size_byte"], 0) - 1
%>\
        '{32'h${"%08x" % start_addr}, 32'h${"%08x" % end_addr}}${"," if not loop.last else ""}
% endfor
    }}${"," if not loop.last or xbar != top["xbar"][-1] else "};"}
% endif
% endfor
% endfor

// List of Xbar hosts
tl_host_t xbar_hosts[$] = '{
% for host in host_dev_map.keys():
    '{"${host.replace('.', '__')}", ${loop.index}, '{
<%
  host_devices = host_dev_map[host];
%>\
% for device in host_devices:
% if loop.last:
      "${device.replace('.', '__')}"}}
% else:
      "${device.replace('.', '__')}",
% endif
% endfor
% if loop.last:
};
% else:
,
% endif
% endfor
@@ -0,0 +1,122 @@
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

'''Code representing the entire chip for reggen'''

from typing import Dict, List, Optional, Tuple, Union

from reggen.ip_block import IpBlock
from reggen.params import ReggenParams
from reggen.reg_block import RegBlock
from reggen.window import Window

_IFName = Tuple[str, Optional[str]]
_Triple = Tuple[int, str, IpBlock]


class Top:
    '''An object representing the entire chip, as seen by reggen.

    This contains instances of some blocks (possibly multiple instances of each
    block), starting at well-defined base addresses. It may also contain some
    windows. These are memories that don't have their own comportable IP (so
    aren't defined in a block), but still take up address space.

    '''

    def __init__(self,
                 regwidth: int,
                 blocks: Dict[str, IpBlock],
                 instances: Dict[str, str],
                 if_addrs: Dict[Tuple[str, Optional[str]], int],
                 windows: List[Window],
                 attrs: Dict[str, str]):
        '''Class initializer.

        regwidth is the width of the registers (which must match for all the
        blocks) in bits.

        blocks is a map from block name to IpBlock object.

        instances is a map from instance name to the name of the block it
        instantiates. Every block name that appears in instances must be a key
        of blocks.

        if_addrs is a dictionary that maps the name of a device interface on
        some instance of some block to its base address. A key of the form (n,
        i) means "the device interface called i on an instance called n". If i
        is None, this is an unnamed device interface. Every instance name (n)
        that appears in connections must be a key of instances.

        windows is a list of windows (these contain base addresses already).

        attrs is a map from instance name to attr field of the block

        '''

        self.regwidth = regwidth
        self.blocks = blocks
        self.instances = instances
        self.if_addrs = if_addrs
        self.attrs = attrs

        self.window_block = RegBlock(regwidth, ReggenParams())

        # Generate one list of base addresses and objects (with each object
        # either a block name and interface name or a window). While we're at
        # it, construct inst_to_block_name and if_addrs.
        merged = []  # type: List[Tuple[int, Union[_IFName, Window]]]
        for full_if_name, addr in if_addrs.items():
            merged.append((addr, full_if_name))

            inst_name, if_name = full_if_name

            # The instance name must match some key in instances, whose value
            # should in turn match some key in blocks.
            assert inst_name in instances
            block_name = instances[inst_name]
            assert block_name in blocks

            # Check that if_name is indeed the name of a device interface for
            # that block.
            block = blocks[block_name]
            assert block.bus_interfaces.has_interface(False, if_name)

        for window in sorted(windows, key=lambda w: w.offset):
            merged.append((window.offset, window))
            self.window_block.add_window(window)

        # A map from block name to the list of its instances. These instances
        # are listed in increasing order of the lowest base address of one of
        # their interfaces. The entries are added into the dict in the same
        # order, so an iteration over items() will give blocks ordered by their
        # first occurrence in the address map.
        self.block_instances = {}  # type: Dict[str, List[str]]

        # Walk the merged list in order of increasing base address. Check for
        # overlaps and construct block_instances.
        offset = 0
        for base_addr, item in sorted(merged, key=lambda pr: pr[0]):
            # Make sure that this item doesn't overlap with the previous one
            assert offset <= base_addr, item

            if isinstance(item, Window):
                addrsep = (regwidth + 7) // 8
                offset = item.next_offset(addrsep)
                continue

            inst_name, if_name = item
            block_name = instances[inst_name]
            block = blocks[block_name]

            lst = self.block_instances.setdefault(block_name, [])
            if inst_name not in lst:
                lst.append(inst_name)

            # This should be guaranteed by the fact that we've already checked
            # the existence of a device interface.
            assert if_name in block.reg_blocks
            reg_block = block.reg_blocks[if_name]

            offset = base_addr + reg_block.offset
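The constructor's address walk can be illustrated with plain tuples. Everything below is invented: interfaces and the window are reduced to (base address, size) entries, which is enough to show the sort-and-check-overlap pattern.

# Sketch: the same "sort by base address, then check for overlaps" walk, with
# interfaces and the window reduced to invented (base address, size) entries.
if_addrs = {("uart0", None): 0x4000_0000, ("gpio", None): 0x4004_0000}
if_sizes = {("uart0", None): 0x1000, ("gpio", None): 0x1000}
windows = [(0x1000_0000, 0x2_0000)]   # (offset, size_bytes) of one memory window

merged = [(addr, name) for name, addr in if_addrs.items()]
merged += [(off, ("window", size)) for off, size in windows]

offset = 0
for base_addr, item in sorted(merged, key=lambda pr: pr[0]):
    assert offset <= base_addr, item   # items must not overlap
    if item[0] == "window":
        offset = base_addr + item[1]
    else:
        offset = base_addr + if_sizes[item]

print("address map is overlap-free")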
@@ -0,0 +1,151 @@
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0

// UVM registers auto-generated by `reggen` containing UVM definitions for the entire top-level
<%!
  from topgen.gen_dv import sv_base_addr
  from reggen.gen_dv import bcname, mcname, miname
%>
##
## This template is used for chip-wide tests. It expects to be run with the
## following arguments
##
##   top              a Top object
##
##   dv_base_prefix   a string for the base register type. If it is FOO, then
##                    we will inherit from FOO_reg (assumed to be a subclass
##                    of uvm_reg).
##
## Like uvm_reg.sv.tpl, we use functions from uvm_reg_base.sv.tpl to define
## per-device-interface code.
##
<%namespace file="uvm_reg_base.sv.tpl" import="*"/>\
##
##
## Waive the package-filename check: we're going to be defining all sorts of
## packages in a single file.

// verilog_lint: waive-start package-filename
##
## Iterate over the device interfaces of blocks in Top, constructing a package
## for each. Sorting items like this guarantees we'll work alphabetically in
## block name.
% for block_name, block in sorted(top.blocks.items()):
% for if_name, rb in block.reg_blocks.items():
<%
    if_suffix = '' if if_name is None else '_' + if_name
    esc_if_name = block_name.lower() + if_suffix
    if_desc = '' if if_name is None else '; interface {}'.format(if_name)
    reg_block_path = 'u_reg' + if_suffix
    reg_block_path = reg_block_path if block.hier_path is None else block.hier_path + "." + reg_block_path
%>\
// Block: ${block_name.lower()}${if_desc}
${make_ral_pkg(dv_base_prefix, top.regwidth, reg_block_path, rb, esc_if_name)}
% endfor
% endfor
##
##
## Now that we've made the block-level packages, re-instate the
## package-filename check. The only package left is chip_ral_pkg, which should
## match the generated filename.

// verilog_lint: waive-stop package-filename

// Block: chip
package chip_ral_pkg;
<%
  if_packages = []
  for block_name, block in sorted(top.blocks.items()):
    for if_name in block.reg_blocks:
      if_suffix = '' if if_name is None else '_' + if_name
      if_packages.append('{}{}_ral_pkg'.format(block_name.lower(), if_suffix))

  windows = top.window_block.windows
%>\
${make_ral_pkg_hdr(dv_base_prefix, if_packages)}
${make_ral_pkg_fwd_decls('chip', [], windows)}
% for window in windows:

${make_ral_pkg_window_class(dv_base_prefix, 'chip', window)}
% endfor

  class chip_reg_block extends ${dv_base_prefix}_reg_block;
    // sub blocks
% for block_name, block in sorted(top.blocks.items()):
% for inst_name in top.block_instances[block_name.lower()]:
% for if_name, rb in block.reg_blocks.items():
<%
      if_suffix = '' if if_name is None else '_' + if_name
      esc_if_name = block_name.lower() + if_suffix
      if_inst = inst_name + if_suffix
%>\
    rand ${bcname(esc_if_name)} ${if_inst};
% endfor
% endfor
% endfor
% if windows:
    // memories
% for window in windows:
    rand ${mcname('chip', window)} ${miname(window)};
% endfor
% endif

    `uvm_object_utils(chip_reg_block)

    function new(string name = "chip_reg_block",
                 int has_coverage = UVM_NO_COVERAGE);
      super.new(name, has_coverage);
    endfunction : new

    virtual function void build(uvm_reg_addr_t base_addr,
                                csr_excl_item csr_excl = null);
      // create default map
      this.default_map = create_map(.name("default_map"),
                                    .base_addr(base_addr),
                                    .n_bytes(${top.regwidth//8}),
                                    .endian(UVM_LITTLE_ENDIAN));
      if (csr_excl == null) begin
        csr_excl = csr_excl_item::type_id::create("csr_excl");
        this.csr_excl = csr_excl;
      end

      // create sub blocks and add their maps
% for block_name, block in sorted(top.blocks.items()):
% for inst_name in top.block_instances[block_name.lower()]:
% for if_name, rb in block.reg_blocks.items():
<%
      if_suffix = '' if if_name is None else '_' + if_name
      esc_if_name = block_name.lower() + if_suffix
      if_inst = inst_name + if_suffix

      if top.attrs.get(inst_name) == 'reggen_only':
        hdl_path = 'tb.dut.u_' + inst_name
      else:
        hdl_path = 'tb.dut.top_earlgrey.u_' + inst_name
      qual_if_name = (inst_name, if_name)
      base_addr = top.if_addrs[qual_if_name]
      base_addr_txt = sv_base_addr(top, qual_if_name)

      hpr_indent = (len(if_inst) + len('.set_hdl_path_root(')) * ' '
%>\
      ${if_inst} = ${bcname(esc_if_name)}::type_id::create("${if_inst}");
      ${if_inst}.configure(.parent(this));
      ${if_inst}.build(.base_addr(base_addr + ${base_addr_txt}), .csr_excl(csr_excl));
      ${if_inst}.set_hdl_path_root("${hdl_path}",
      ${hpr_indent}"BkdrRegPathRtl");
      ${if_inst}.set_hdl_path_root("${hdl_path}",
      ${hpr_indent}"BkdrRegPathRtlCommitted");
      ${if_inst}.set_hdl_path_root("${hdl_path}",
      ${hpr_indent}"BkdrRegPathRtlShadow");
      default_map.add_submap(.child_map(${if_inst}.default_map),
                             .offset(base_addr + ${base_addr_txt}));
% endfor
% endfor
% endfor
      ${make_ral_pkg_window_instances(top.regwidth, 'chip', top.window_block)}

    endfunction : build
  endclass : chip_reg_block

endpackage
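The naming used throughout this template (interface suffix, escaped interface name, instance handle, per-interface RAL package) is plain string manipulation; the sketch below demonstrates it on invented block/instance/interface names.

# Sketch: the per-interface naming used by the RAL template, on invented names.
def ral_names(block_name, inst_name, if_name):
    if_suffix = '' if if_name is None else '_' + if_name
    esc_if_name = block_name.lower() + if_suffix    # passed to bcname() in the template
    if_inst = inst_name + if_suffix                 # handle inside chip_reg_block
    pkg = '{}_ral_pkg'.format(esc_if_name)
    return esc_if_name, if_inst, pkg

print(ral_names("uart", "uart0", None))     # ('uart', 'uart0', 'uart_ral_pkg')
print(ral_names("rv_dm", "rv_dm", "regs"))  # ('rv_dm_regs', 'rv_dm_regs', 'rv_dm_regs_ral_pkg')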
|
|
@ -0,0 +1,846 @@
|
||||||
|
# Copyright lowRISC contributors.
|
||||||
|
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
import re
|
||||||
|
import logging as log
|
||||||
|
from collections import OrderedDict
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
from reggen.validate import check_keys
|
||||||
|
from reggen.ip_block import IpBlock
|
||||||
|
|
||||||
|
# For the reference
|
||||||
|
# val_types = {
|
||||||
|
# 'd': ["int", "integer (binary 0b, octal 0o, decimal, hex 0x)"],
|
||||||
|
# 'x': ["xint", "x for undefined otherwise int"],
|
||||||
|
# 'b': [
|
||||||
|
# "bitrange", "bit number as decimal integer, \
|
||||||
|
# or bit-range as decimal integers msb:lsb"
|
||||||
|
# ],
|
||||||
|
# 'l': ["list", "comma separated list enclosed in `[]`"],
|
||||||
|
# 'ln': ["name list", 'comma separated list enclosed in `[]` of '\
|
||||||
|
# 'one or more groups that have just name and dscr keys.'\
|
||||||
|
# ' e.g. `{ name: "name", desc: "description"}`'],
|
||||||
|
# 'lnw': ["name list+", 'name list that optionally contains a width'],
|
||||||
|
# 'lp': ["parameter list", 'parameter list having default value optionally'],
|
||||||
|
# 'g': ["group", "comma separated group of key:value enclosed in `{}`"],
|
||||||
|
# 's': ["string", "string, typically short"],
|
||||||
|
# 't': ["text", "string, may be multi-line enclosed in `'''` "\
|
||||||
|
# "may use `**bold**`, `*italic*` or `!!Reg` markup"],
|
||||||
|
# 'T': ["tuple", "tuple enclosed in ()"],
|
||||||
|
# 'pi': ["python int", "Native Python type int (generated)"],
|
||||||
|
# 'pb': ["python Bool", "Native Python type Bool (generated)"],
|
||||||
|
# 'pl': ["python list", "Native Python type list (generated)"],
|
||||||
|
# 'pe': ["python enum", "Native Python type enum (generated)"]
|
||||||
|
# }
|
||||||
|
|
||||||
|
# Required/optional field in top hjson
|
||||||
|
top_required = {
|
||||||
|
'name': ['s', 'Top name'],
|
||||||
|
'type': ['s', 'type of hjson. Shall be "top" always'],
|
||||||
|
'clocks': ['g', 'group of clock properties'],
|
||||||
|
'resets': ['l', 'list of resets'],
|
||||||
|
'module': ['l', 'list of modules to instantiate'],
|
||||||
|
'memory': ['l', 'list of memories. At least one memory '
|
||||||
|
'is needed to run the software'],
|
||||||
|
'xbar': ['l', 'List of the xbar used in the top'],
|
||||||
|
'rnd_cnst_seed': ['int', "Seed for random netlist constant computation"],
|
||||||
|
'pinout': ['g', 'Pinout configuration'],
|
||||||
|
'targets': ['l', ' Target configurations'],
|
||||||
|
'pinmux': ['g', 'pinmux configuration'],
|
||||||
|
}
|
||||||
|
|
||||||
|
top_optional = {
|
||||||
|
'alert_async': ['l', 'async alerts (generated)'],
|
||||||
|
'alert': ['lnw', 'alerts (generated)'],
|
||||||
|
'alert_module': [
|
||||||
|
'l',
|
||||||
|
'list of the modules that connect to alert_handler'
|
||||||
|
],
|
||||||
|
'datawidth': ['pn', "default data width"],
|
||||||
|
'exported_clks': ['g', 'clock signal routing rules'],
|
||||||
|
'host': ['g', 'list of host-only components in the system'],
|
||||||
|
'inter_module': ['g', 'define the signal connections between the modules'],
|
||||||
|
'interrupt': ['lnw', 'interrupts (generated)'],
|
||||||
|
'interrupt_module': ['l', 'list of the modules that connect to rv_plic'],
|
||||||
|
'num_cores': ['pn', "number of computing units"],
|
||||||
|
'power': ['g', 'power domains supported by the design'],
|
||||||
|
'port': ['g', 'assign special attributes to specific ports']
|
||||||
|
}
|
||||||
|
|
||||||
|
top_added = {}
|
||||||
|
|
||||||
|
pinmux_required = {}
|
||||||
|
pinmux_optional = {
|
||||||
|
'num_wkup_detect': [
|
||||||
|
'd', 'Number of wakeup detectors'
|
||||||
|
],
|
||||||
|
'wkup_cnt_width': [
|
||||||
|
'd', 'Number of bits in wakeup detector counters'
|
||||||
|
],
|
||||||
|
'signals': ['l', 'List of Dedicated IOs.'],
|
||||||
|
}
|
||||||
|
pinmux_added = {
|
||||||
|
'ios': ['l', 'Full list of IO'],
|
||||||
|
}
|
||||||
|
|
||||||
|
pinmux_sig_required = {
|
||||||
|
'instance': ['s', 'Module instance name'],
|
||||||
|
'connection': ['s', 'Specification of connection type, '
|
||||||
|
'can be direct, manual or muxed'],
|
||||||
|
}
|
||||||
|
pinmux_sig_optional = {
|
||||||
|
'port': ['s', 'Port name of module'],
|
||||||
|
'pad': ['s', 'Pad name for direct connections'],
|
||||||
|
'desc': ['s', 'Signal description'],
|
||||||
|
'attr': ['s', 'Pad type for generating the correct attribute CSR']
|
||||||
|
}
|
||||||
|
pinmux_sig_added = {}
|
||||||
|
|
||||||
|
pinout_required = {
|
||||||
|
'banks': ['l', 'List of IO power banks'],
|
||||||
|
'pads': ['l', 'List of pads']
|
||||||
|
}
|
||||||
|
pinout_optional = {
|
||||||
|
}
|
||||||
|
pinout_added = {}
|
||||||
|
|
||||||
|
pad_required = {
|
||||||
|
'name': ['l', 'Pad name'],
|
||||||
|
'type': ['s', 'Pad type'],
|
||||||
|
'bank': ['s', 'IO power bank for the pad'],
|
||||||
|
'connection': ['s', 'Specification of connection type, '
|
||||||
|
'can be direct, manual or muxed'],
|
||||||
|
}
|
||||||
|
pad_optional = {
|
||||||
|
'desc': ['s', 'Pad description'],
|
||||||
|
}
|
||||||
|
pad_added = {}
|
||||||
|
|
||||||
|
target_required = {
|
||||||
|
'name': ['s', 'Name of target'],
|
||||||
|
'pinout': ['g', 'Target-specific pinout configuration'],
|
||||||
|
'pinmux': ['g', 'Target-specific pinmux configuration']
|
||||||
|
}
|
||||||
|
target_optional = {
|
||||||
|
}
|
||||||
|
target_added = {}
|
||||||
|
|
||||||
|
target_pinmux_required = {
|
||||||
|
'special_signals': ['l', 'List of special signals and the pad they are mapped to.'],
|
||||||
|
}
|
||||||
|
target_pinmux_optional = {}
|
||||||
|
target_pinmux_added = {}
|
||||||
|
|
||||||
|
target_pinout_required = {
|
||||||
|
'remove_pads': ['l', 'List of pad names to remove and stub out'],
|
||||||
|
'add_pads': ['l', 'List of manual pads to add'],
|
||||||
|
}
|
||||||
|
target_pinout_optional = {}
|
||||||
|
target_pinout_added = {}
|
||||||
|
|
||||||
|
straps_required = {
|
||||||
|
'tap0': ['s', 'Name of tap0 pad'],
|
||||||
|
'tap1': ['s', 'Name of tap1 pad'],
|
||||||
|
'dft0': ['s', 'Name of dft0 pad'],
|
||||||
|
'dft1': ['s', 'Name of dft1 pad'],
|
||||||
|
}
|
||||||
|
straps_optional = {}
|
||||||
|
straps_added = {}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
special_sig_required = {
|
||||||
|
'name': ['s', 'DIO name'],
|
||||||
|
'pad': ['s', 'Pad name'],
|
||||||
|
}
|
||||||
|
special_sig_optional = {
|
||||||
|
'desc': ['s', 'Description of signal connection'],
|
||||||
|
}
|
||||||
|
special_sig_added = {}
|
||||||
|
|
||||||
|
eflash_required = {
|
||||||
|
'banks': ['d', 'number of flash banks'],
|
||||||
|
'base_addr': ['s', 'hex start address of memory'],
|
||||||
|
'clock_connections': ['g', 'generated, elaborated version of clock_srcs'],
|
||||||
|
'clock_group': ['s', 'associated clock attribute group'],
|
||||||
|
'clock_srcs': ['g', 'clock connections'],
|
||||||
|
'inter_signal_list': ['lg', 'intersignal list'],
|
||||||
|
'name': ['s', 'name of flash memory'],
|
||||||
|
'pages_per_bank': ['d', 'number of data pages per flash bank'],
|
||||||
|
'program_resolution': ['d', 'maximum number of flash words allowed to program'],
|
||||||
|
'reset_connections': ['g', 'reset connections'],
|
||||||
|
'swaccess': ['s', 'software accessibility'],
|
||||||
|
'type': ['s', 'type of memory']
|
||||||
|
}
|
||||||
|
|
||||||
|
eflash_optional = {}
|
||||||
|
|
||||||
|
eflash_added = {}
|
||||||
|
|
||||||
|
module_required = {
|
||||||
|
'name': ['s', 'name of the instance'],
|
||||||
|
'type': ['s', 'comportable IP type'],
|
||||||
|
'clock_srcs': ['g', 'dict with clock sources'],
|
||||||
|
'clock_group': ['s', 'clock group'],
|
||||||
|
'reset_connections': ['g', 'dict with reset sources'],
|
||||||
|
}
|
||||||
|
|
||||||
|
module_optional = {
|
||||||
|
'domain': ['s', 'power domain, defaults to Domain0'],
|
||||||
|
'clock_reset_export': ['l', 'optional list with prefixes for exported '
|
||||||
|
'clocks and resets at the chip level'],
|
||||||
|
'attr': ['s', 'optional attribute indicating whether the IP is '
|
||||||
|
'"templated" or "reggen_only"'],
|
||||||
|
'base_addr': ['s', 'hex start address of the peripheral '
|
||||||
|
'(if the IP has only a single TL-UL interface)'],
|
||||||
|
'base_addrs': ['d', 'hex start addresses of the peripheral '
|
||||||
|
' (if the IP has multiple TL-UL interfaces)'],
|
||||||
|
'memory': ['g', 'optional dict with memory region attributes'],
|
||||||
|
'param_decl': ['g', 'optional dict that allows overriding instantiation parameters']
|
||||||
|
}
|
||||||
|
|
||||||
|
module_added = {
|
||||||
|
'clock_connections': ['g', 'generated clock connections']
|
||||||
|
}
|
||||||
|
|
||||||
|
memory_required = {
|
||||||
|
'label': ['s', 'region label for the linker script'],
|
||||||
|
'swaccess': ['s', 'access attributes for the linker script'],
|
||||||
|
'size': ['d', 'memory region size in bytes for the linker script, '
|
||||||
|
'xbar and RTL parameterisations'],
|
||||||
|
}
|
||||||
|
|
||||||
|
memory_optional = {
|
||||||
|
}
|
||||||
|
|
||||||
|
memory_added = {
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Supported PAD types.
|
||||||
|
# Needs to coincide with enum definition in prim_pad_wrapper_pkg.sv
|
||||||
|
class PadType(Enum):
|
||||||
|
INPUT_STD = 'InputStd'
|
||||||
|
BIDIR_STD = 'BidirStd'
|
||||||
|
BIDIR_TOL = 'BidirTol'
|
||||||
|
BIDIR_OD = 'BidirOd'
|
||||||
|
ANALOG_IN0 = 'AnalogIn0'
|
||||||
|
ANALOG_IN1 = 'AnalogIn1'
|
||||||
|
|
||||||
|
|
||||||
|
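# Return True if obj is a valid PadType value, False otherwise.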
def is_valid_pad_type(obj):
|
||||||
|
try:
|
||||||
|
PadType(obj)
|
||||||
|
except ValueError:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class TargetType(Enum):
|
||||||
|
MODULE = "module"
|
||||||
|
XBAR = "xbar"
|
||||||
|
|
||||||
|
|
||||||
|
class Target:
|
||||||
|
"""Target class informs the checkers if we are validating a module or xbar
|
||||||
|
"""
|
||||||
|
def __init__(self, target_type):
|
||||||
|
# The type of this target
|
||||||
|
self.target_type = target_type
|
||||||
|
# The key to search against
|
||||||
|
if target_type == TargetType.MODULE:
|
||||||
|
self.key = "type"
|
||||||
|
else:
|
||||||
|
self.key = "name"
|
||||||
|
|
||||||
|
|
||||||
|
class Flash:
|
||||||
|
"""Flash class contains information regarding parameter defaults.
|
||||||
|
For now, only expose banks / pages_per_bank for user configuration.
|
||||||
|
For now, also enforce the power of 2 requirement.
|
||||||
|
"""
|
||||||
|
max_banks = 4
|
||||||
|
max_pages_per_bank = 1024
|
||||||
|
|
||||||
|
def __init__(self, mem):
|
||||||
|
self.banks = mem['banks']
|
||||||
|
self.pages_per_bank = mem['pages_per_bank']
|
||||||
|
self.program_resolution = mem['program_resolution']
|
||||||
|
self.words_per_page = 256
|
||||||
|
self.data_width = 64
|
||||||
|
self.metadata_width = 12
|
||||||
|
self.info_types = 3
|
||||||
|
self.infos_per_bank = [10, 1, 2]
|
||||||
|
|
||||||
|
def is_pow2(self, n):
|
||||||
|
return (n != 0) and (n & (n - 1) == 0)
|
||||||
|
|
||||||
|
def check_values(self):
|
||||||
|
pow2_check = (self.is_pow2(self.banks) and
|
||||||
|
self.is_pow2(self.pages_per_bank) and
|
||||||
|
self.is_pow2(self.program_resolution))
|
||||||
|
limit_check = ((self.banks <= Flash.max_banks) and
|
||||||
|
(self.pages_per_bank <= Flash.max_pages_per_bank))
|
||||||
|
|
||||||
|
return pow2_check and limit_check
|
||||||
|
|
||||||
|
def calc_size(self):
|
||||||
|
word_bytes = self.data_width / 8
|
||||||
|
bytes_per_page = word_bytes * self.words_per_page
|
||||||
|
bytes_per_bank = bytes_per_page * self.pages_per_bank
|
||||||
|
return bytes_per_bank * self.banks
|
||||||
|
|
||||||
|
def populate(self, mem):
|
||||||
|
mem['words_per_page'] = self.words_per_page
|
||||||
|
mem['data_width'] = self.data_width
|
||||||
|
mem['metadata_width'] = self.metadata_width
|
||||||
|
mem['info_types'] = self.info_types
|
||||||
|
mem['infos_per_bank'] = self.infos_per_bank
|
||||||
|
mem['size'] = hex(int(self.calc_size()))
|
||||||
|
|
||||||
|
word_bytes = self.data_width / 8
|
||||||
|
mem['pgm_resolution_bytes'] = int(self.program_resolution * word_bytes)
|
||||||
|
|
||||||
|
|
||||||
|
# Check to see if each module/xbar defined in top.hjson exists as ip/xbar.hjson
|
||||||
|
# Also check to make sure there are not multiple definitions of ip/xbar.hjson for each
|
||||||
|
# top level definition
|
||||||
|
# If it does, return a dictionary of instance names to index in ip/xbarobjs
|
||||||
|
def check_target(top, objs, tgtobj):
|
||||||
|
error = 0
|
||||||
|
idxs = OrderedDict()
|
||||||
|
|
||||||
|
# Collect up counts of object names. We support entries of objs that are
|
||||||
|
# either dicts (for top-levels) or IpBlock objects.
|
||||||
|
name_indices = {}
|
||||||
|
for idx, obj in enumerate(objs):
|
||||||
|
if isinstance(obj, IpBlock):
|
||||||
|
name = obj.name.lower()
|
||||||
|
else:
|
||||||
|
name = obj['name'].lower()
|
||||||
|
|
||||||
|
log.info("%d Order is %s" % (idx, name))
|
||||||
|
name_indices.setdefault(name, []).append(idx)
|
||||||
|
|
||||||
|
tgt_type = tgtobj.target_type.value
|
||||||
|
inst_key = tgtobj.key
|
||||||
|
|
||||||
|
for cfg in top[tgt_type]:
|
||||||
|
cfg_name = cfg['name'].lower()
|
||||||
|
log.info("Checking target %s %s" % (tgt_type, cfg_name))
|
||||||
|
|
||||||
|
indices = name_indices.get(cfg[inst_key], [])
|
||||||
|
if not indices:
|
||||||
|
log.error("Could not find %s.hjson" % cfg_name)
|
||||||
|
error += 1
|
||||||
|
elif len(indices) > 1:
|
||||||
|
log.error("Duplicate %s.hjson" % cfg_name)
|
||||||
|
error += 1
|
||||||
|
else:
|
||||||
|
idxs[cfg_name] = indices[0]
|
||||||
|
|
||||||
|
log.info("Current state %s" % idxs)
|
||||||
|
return error, idxs
|
||||||
|
|
||||||
|
|
||||||
|
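# Validate a single pad entry: check its keys, name uniqueness, pad type,
# IO power bank and connection type. Returns the number of errors found.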
def check_pad(top: Dict,
|
||||||
|
pad: Dict,
|
||||||
|
known_pad_names: Dict,
|
||||||
|
valid_connections: List[str],
|
||||||
|
prefix: str) -> int:
|
||||||
|
error = 0
|
||||||
|
error += check_keys(pad, pad_required, pad_optional,
|
||||||
|
pad_added, prefix)
|
||||||
|
|
||||||
|
# check name uniqueness
|
||||||
|
if pad['name'] in known_pad_names:
|
||||||
|
log.warning('Pad name {} is not unique'.format(pad['name']))
|
||||||
|
error += 1
|
||||||
|
known_pad_names[pad['name']] = 1
|
||||||
|
|
||||||
|
if not is_valid_pad_type(pad['type']):
|
||||||
|
log.warning('Unknown pad type {}'.format(pad['type']))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
if pad['bank'] not in top['pinout']['banks']:
|
||||||
|
log.warning('Unknown IO power bank {}'.format(pad['bank']))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
if pad['connection'] not in valid_connections:
|
||||||
|
log.warning('Connection type {} of pad {} is invalid'
|
||||||
|
.format(pad['connection'], pad['name']))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
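# Validate the pinout group of the top hjson and each pad defined in it.
# Returns the number of errors found.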
def check_pinout(top: Dict, prefix: str) -> int:
|
||||||
|
error = check_keys(top['pinout'], pinout_required, pinout_optional,
|
||||||
|
pinout_added, prefix + ' Pinout')
|
||||||
|
|
||||||
|
known_names = {}
|
||||||
|
for pad in top['pinout']['pads']:
|
||||||
|
error += check_keys(pad, pad_required, pad_optional,
|
||||||
|
pad_added, prefix + ' Pinout')
|
||||||
|
|
||||||
|
error += check_pad(top, pad, known_names,
|
||||||
|
['direct', 'manual', 'muxed'],
|
||||||
|
prefix + ' Pad')
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
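# Validate the pinmux group: check each signal entry, make sure direct pads
# are referenced exactly once, and infer pad attributes where possible.
# Returns the number of errors found.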
def check_pinmux(top: Dict, prefix: str) -> int:
|
||||||
|
error = check_keys(top['pinmux'], pinmux_required, pinmux_optional,
|
||||||
|
pinmux_added, prefix + ' Pinmux')
|
||||||
|
|
||||||
|
# This is used for the direct connection accounting below,
|
||||||
|
# where we tick off already connected direct pads.
|
||||||
|
known_direct_pads = {}
|
||||||
|
direct_pad_attr = {}
|
||||||
|
for pad in top['pinout']['pads']:
|
||||||
|
if pad['connection'] == 'direct':
|
||||||
|
known_direct_pads[pad['name']] = 1
|
||||||
|
direct_pad_attr[pad['name']] = pad['type']
|
||||||
|
|
||||||
|
# Note: the actual signal crosscheck is deferred until the merge stage,
|
||||||
|
# since we have no idea at this point which IOs comportable IPs expose.
|
||||||
|
for sig in top['pinmux']['signals']:
|
||||||
|
error += check_keys(sig, pinmux_sig_required, pinmux_sig_optional,
|
||||||
|
pinmux_sig_added, prefix + ' Pinmux signal')
|
||||||
|
|
||||||
|
if sig['connection'] not in ['direct', 'manual', 'muxed']:
|
||||||
|
log.warning('Invalid connection type {}'.format(sig['connection']))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
# The pad needs to refer to a valid pad name in the pinout that is of
|
||||||
|
# connection type "direct". We tick off all direct pads that have been
|
||||||
|
# referenced in order to make sure there are no double connections
|
||||||
|
# and unconnected direct pads.
|
||||||
|
padname = sig.setdefault('pad', '')
|
||||||
|
if padname != '':
|
||||||
|
if padname in known_direct_pads:
|
||||||
|
if known_direct_pads[padname] == 1:
|
||||||
|
known_direct_pads[padname] = 0
|
||||||
|
padattr = direct_pad_attr[padname]
|
||||||
|
else:
|
||||||
|
log.warning('Direct pad {} is already connected'
|
||||||
|
.format(padname))
|
||||||
|
error += 1
|
||||||
|
else:
|
||||||
|
log.warning('Unknown direct pad {}'.format(padname))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
# Check port naming scheme.
|
||||||
|
port = sig.setdefault('port', '')
|
||||||
|
pattern = r'^[a-zA-Z0-9_]*(\[[0-9]*\]){0,1}$'
|
||||||
|
matches = re.match(pattern, port)
|
||||||
|
if matches is None:
|
||||||
|
log.warning('Port name {} has wrong format'
|
||||||
|
.format(port))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
# Check that only direct connections have pad keys
|
||||||
|
if sig['connection'] == 'direct':
|
||||||
|
if sig.setdefault('attr', '') != '':
|
||||||
|
log.warning('Direct connection of instance {} port {} '
|
||||||
|
'must not have an associated pad attribute field'
|
||||||
|
.format(sig['instance'],
|
||||||
|
sig['port']))
|
||||||
|
error += 1
|
||||||
|
# Since the signal is directly connected, we can automatically infer
|
||||||
|
# the pad type needed to instantiate the correct attribute CSR WARL
|
||||||
|
# module inside the pinmux.
|
||||||
|
sig['attr'] = padattr
|
||||||
|
|
||||||
|
if padname == '':
|
||||||
|
log.warning('Instance {} port {} connection is of direct type '
|
||||||
|
'and therefore must have an associated pad name.'
|
||||||
|
.format(sig['instance'],
|
||||||
|
sig['port']))
|
||||||
|
error += 1
|
||||||
|
if port == '':
|
||||||
|
log.warning('Instance {} port {} connection is of direct type '
|
||||||
|
'and therefore must have an associated port name.'
|
||||||
|
.format(sig['instance'],
|
||||||
|
sig['port']))
|
||||||
|
error += 1
|
||||||
|
elif sig['connection'] == 'muxed':
|
||||||
|
# Muxed signals do not have a corresponding pad and attribute CSR,
|
||||||
|
# since they first go through the pinmux matrix.
|
||||||
|
if sig.setdefault('attr', '') != '':
|
||||||
|
log.warning('Muxed connection of instance {} port {} '
|
||||||
|
'must not have an associated pad attribute field'
|
||||||
|
.format(sig['instance'],
|
||||||
|
sig['port']))
|
||||||
|
error += 1
|
||||||
|
if padname != '':
|
||||||
|
log.warning('Muxed connection of instance {} port {} '
|
||||||
|
'must not have an associated pad'
|
||||||
|
.format(sig['instance'],
|
||||||
|
sig['port']))
|
||||||
|
error += 1
|
||||||
|
elif sig['connection'] == 'manual':
|
||||||
|
# This pad attr key is only allowed in the manual case,
|
||||||
|
# as there is no way to infer the pad type automatically.
|
||||||
|
sig.setdefault('attr', 'BidirStd')
|
||||||
|
if padname != '':
|
||||||
|
log.warning('Manual connection of instance {} port {} '
|
||||||
|
'must not have an associated pad'
|
||||||
|
.format(sig['instance'],
|
||||||
|
sig['port']))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
# At this point, all direct pads should have been ticked off.
|
||||||
|
for key, val in known_direct_pads.items():
|
||||||
|
if val == 1:
|
||||||
|
log.warning('Direct pad {} has not been connected'
|
||||||
|
.format(key))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
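# Validate the target configurations: per-target pinmux/pinout overrides,
# special signals, and pads that are added or removed for each target.
# Returns the number of errors found.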
def check_implementation_targets(top: Dict, prefix: str) -> int:
|
||||||
|
error = 0
|
||||||
|
known_names = {}
|
||||||
|
for target in top['targets']:
|
||||||
|
error += check_keys(target, target_required, target_optional,
|
||||||
|
target_added, prefix + ' Targets')
|
||||||
|
|
||||||
|
# check name uniqueness
|
||||||
|
if target['name'] in known_names:
|
||||||
|
log.warning('Target name {} is not unique'.format(target['name']))
|
||||||
|
error += 1
|
||||||
|
known_names[target['name']] = 1
|
||||||
|
|
||||||
|
error += check_keys(target['pinmux'], target_pinmux_required, target_pinmux_optional,
|
||||||
|
target_pinmux_added, prefix + ' Target pinmux')
|
||||||
|
|
||||||
|
error += check_keys(target['pinout'], target_pinout_required, target_pinout_optional,
|
||||||
|
target_pinout_added, prefix + ' Target pinout')
|
||||||
|
|
||||||
|
# Check special pad signals
|
||||||
|
known_entry_names = {}
|
||||||
|
for entry in target['pinmux']['special_signals']:
|
||||||
|
error += check_keys(entry, special_sig_required, special_sig_optional,
|
||||||
|
special_sig_added, prefix + ' Special signal')
|
||||||
|
|
||||||
|
# check name uniqueness
|
||||||
|
if entry['name'] in known_entry_names:
|
||||||
|
log.warning('Special pad name {} is not unique'.format(entry['name']))
|
||||||
|
error += 1
|
||||||
|
known_entry_names[entry['name']] = 1
|
||||||
|
|
||||||
|
# The pad key needs to refer to a valid pad name.
|
||||||
|
is_muxed = False
|
||||||
|
for pad in top['pinout']['pads']:
|
||||||
|
if entry['pad'] == pad['name']:
|
||||||
|
is_muxed = pad['connection'] == 'muxed'
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
log.warning('Unknown pad {}'.format(entry['pad']))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
if not is_muxed:
|
||||||
|
# If this is not a muxed pad, we need to make sure this refers to
|
||||||
|
# a DIO that is NOT a manual pad.
|
||||||
|
for sig in top['pinmux']['signals']:
|
||||||
|
if entry['pad'] == sig['pad']:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
log.warning('Special pad {} cannot refer to a manual pad'.format(entry['pad']))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
# Check pads to remove and stub out
|
||||||
|
for entry in target['pinout']['remove_pads']:
|
||||||
|
# The pad key needs to refer to a valid pad name.
|
||||||
|
for pad in top['pinout']['pads']:
|
||||||
|
if entry == pad['name']:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
log.warning('Unknown pad {}'.format(entry))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
# Check pads to add
|
||||||
|
known_pad_names = {}
|
||||||
|
for pad in top['pinout']['pads']:
|
||||||
|
known_pad_names.update({pad['name']: 1})
|
||||||
|
|
||||||
|
for pad in target['pinout']['add_pads']:
|
||||||
|
error += check_pad(top, pad, known_pad_names, ['manual'],
|
||||||
|
prefix + ' Additional Pad')
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
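# Check that there is exactly one pwrmgr/clkmgr/rstmgr and that every module
# and xbar connects its clock and reset ports to defined clock sources and
# reset nets.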
def check_clocks_resets(top, ipobjs, ip_idxs, xbarobjs, xbar_idxs):
|
||||||
|
|
||||||
|
error = 0
|
||||||
|
|
||||||
|
# there should only be one each of pwrmgr/clkmgr/rstmgr
|
||||||
|
pwrmgrs = [m for m in top['module'] if m['type'] == 'pwrmgr']
|
||||||
|
clkmgrs = [m for m in top['module'] if m['type'] == 'clkmgr']
|
||||||
|
rstmgrs = [m for m in top['module'] if m['type'] == 'rstmgr']
|
||||||
|
|
||||||
|
if len(pwrmgrs) != 1 or len(clkmgrs) != 1 or len(rstmgrs) != 1:
|
||||||
|
log.error("Incorrect number of pwrmgr/clkmgr/rstmgr")
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
# all defined clock/reset nets
|
||||||
|
reset_nets = [reset['name'] for reset in top['resets']['nodes']]
|
||||||
|
clock_srcs = list(top['clocks'].all_srcs.keys())
|
||||||
|
|
||||||
|
# Check clock/reset port connection for all IPs
|
||||||
|
for ipcfg in top['module']:
|
||||||
|
ipcfg_name = ipcfg['name'].lower()
|
||||||
|
log.info("Checking clock/resets for %s" % ipcfg_name)
|
||||||
|
error += validate_reset(ipcfg, ipobjs[ip_idxs[ipcfg_name]], reset_nets)
|
||||||
|
error += validate_clock(ipcfg, ipobjs[ip_idxs[ipcfg_name]], clock_srcs)
|
||||||
|
|
||||||
|
if error:
|
||||||
|
log.error("module clock/reset checking failed")
|
||||||
|
break
|
||||||
|
|
||||||
|
# Check clock/reset port connection for all xbars
|
||||||
|
for xbarcfg in top['xbar']:
|
||||||
|
xbarcfg_name = xbarcfg['name'].lower()
|
||||||
|
log.info("Checking clock/resets for xbar %s" % xbarcfg_name)
|
||||||
|
error += validate_reset(xbarcfg, xbarobjs[xbar_idxs[xbarcfg_name]],
|
||||||
|
reset_nets, "xbar")
|
||||||
|
error += validate_clock(xbarcfg, xbarobjs[xbar_idxs[xbarcfg_name]],
|
||||||
|
clock_srcs, "xbar")
|
||||||
|
|
||||||
|
if error:
|
||||||
|
log.error("xbar clock/reset checking failed")
|
||||||
|
break
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
# Checks the following
|
||||||
|
# For each defined reset connection in top*.hjson, there exists a defined port at the destination
|
||||||
|
# and defined reset net
|
||||||
|
# There are the same number of defined connections as there are ports
|
||||||
|
def validate_reset(top, inst, reset_nets, prefix=""):
|
||||||
|
# Gather inst port list
|
||||||
|
error = 0
|
||||||
|
|
||||||
|
# Handle either an IpBlock (generated by reggen) or an OrderedDict
|
||||||
|
# (generated by topgen for a crossbar)
|
||||||
|
if isinstance(inst, IpBlock):
|
||||||
|
name = inst.name
|
||||||
|
reset_signals = inst.clocking.reset_signals()
|
||||||
|
else:
|
||||||
|
name = inst['name']
|
||||||
|
reset_signals = ([inst.get('reset_primary', 'rst_ni')] +
|
||||||
|
inst.get('other_reset_list', []))
|
||||||
|
|
||||||
|
log.info("%s %s resets are %s" %
|
||||||
|
(prefix, name, reset_signals))
|
||||||
|
|
||||||
|
if len(top['reset_connections']) != len(reset_signals):
|
||||||
|
error += 1
|
||||||
|
log.error("%s %s mismatched number of reset ports and nets" %
|
||||||
|
(prefix, name))
|
||||||
|
|
||||||
|
missing_port = [
|
||||||
|
port for port in top['reset_connections'].keys()
|
||||||
|
if port not in reset_signals
|
||||||
|
]
|
||||||
|
|
||||||
|
if missing_port:
|
||||||
|
error += 1
|
||||||
|
log.error("%s %s Following reset ports do not exist:" %
|
||||||
|
(prefix, name))
|
||||||
|
[log.error("%s" % port) for port in missing_port]
|
||||||
|
|
||||||
|
missing_net = [
|
||||||
|
net for port, net in top['reset_connections'].items()
|
||||||
|
if net not in reset_nets
|
||||||
|
]
|
||||||
|
|
||||||
|
if missing_net:
|
||||||
|
error += 1
|
||||||
|
log.error("%s %s Following reset nets do not exist:" %
|
||||||
|
(prefix, name))
|
||||||
|
[log.error("%s" % net) for net in missing_net]
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
# Checks the following
|
||||||
|
# For each defined clock_src in top*.hjson, there exists a defined port at the destination
|
||||||
|
# and defined clock source
|
||||||
|
# There are the same number of defined connections as there are ports
|
||||||
|
def validate_clock(top, inst, clock_srcs, prefix=""):
|
||||||
|
# Gather inst port list
|
||||||
|
error = 0
|
||||||
|
|
||||||
|
# Handle either an IpBlock (generated by reggen) or an OrderedDict
|
||||||
|
# (generated by topgen for a crossbar)
|
||||||
|
if isinstance(inst, IpBlock):
|
||||||
|
name = inst.name
|
||||||
|
clock_signals = inst.clocking.clock_signals()
|
||||||
|
else:
|
||||||
|
name = inst['name']
|
||||||
|
clock_signals = ([inst.get('clock_primary', 'clk_i')] +
|
||||||
|
inst.get('other_clock_list', []))
|
||||||
|
|
||||||
|
if len(top['clock_srcs']) != len(clock_signals):
|
||||||
|
error += 1
|
||||||
|
log.error("%s %s mismatched number of clock ports and nets" %
|
||||||
|
(prefix, name))
|
||||||
|
|
||||||
|
missing_port = [
|
||||||
|
port for port in top['clock_srcs'].keys()
|
||||||
|
if port not in clock_signals
|
||||||
|
]
|
||||||
|
|
||||||
|
if missing_port:
|
||||||
|
error += 1
|
||||||
|
log.error("%s %s Following clock ports do not exist:" %
|
||||||
|
(prefix, name))
|
||||||
|
[log.error("%s" % port) for port in missing_port]
|
||||||
|
|
||||||
|
missing_net = [
|
||||||
|
net for port, net in top['clock_srcs'].items() if net not in clock_srcs
|
||||||
|
]
|
||||||
|
|
||||||
|
if missing_net:
|
||||||
|
error += 1
|
||||||
|
log.error("%s %s Following clock nets do not exist:" %
|
||||||
|
(prefix, name))
|
||||||
|
[log.error("%s" % net) for net in missing_net]
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
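# Validate the eflash memory entries and fill in the derived flash parameters.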
def check_flash(top):
|
||||||
|
error = 0
|
||||||
|
|
||||||
|
for mem in top['memory']:
|
||||||
|
if mem['type'] == "eflash":
|
||||||
|
error = check_keys(mem, eflash_required, eflash_optional,
|
||||||
|
eflash_added, "Eflash")
|
||||||
|
|
||||||
|
flash = Flash(mem)
|
||||||
|
error += 1 if not flash.check_values() else 0
|
||||||
|
|
||||||
|
if error:
|
||||||
|
log.error("Flash check failed")
|
||||||
|
else:
|
||||||
|
flash.populate(mem)
|
||||||
|
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
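# Check that the default power domain is valid and that every module, memory
# and xbar is assigned to a defined power domain.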
def check_power_domains(top):
|
||||||
|
error = 0
|
||||||
|
|
||||||
|
# check that the default domain is valid
|
||||||
|
if top['power']['default'] not in top['power']['domains']:
|
||||||
|
error += 1
|
||||||
|
return error
|
||||||
|
|
||||||
|
# Check that each module, xbar, memory has a power domain defined.
|
||||||
|
# If not, give it a default.
|
||||||
|
# If there is one defined, check that it is a valid definition
|
||||||
|
for end_point in top['module'] + top['memory'] + top['xbar']:
|
||||||
|
if 'domain' not in end_point:
|
||||||
|
end_point['domain'] = top['power']['default']
|
||||||
|
|
||||||
|
if end_point['domain'] not in top['power']['domains']:
|
||||||
|
log.error("{} defined invalid domain {}"
|
||||||
|
.format(end_point['name'],
|
||||||
|
end_point['domain']))
|
||||||
|
error += 1
|
||||||
|
return error
|
||||||
|
|
||||||
|
# arrived without incident, return
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
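# Check each module instantiation: required/optional keys, the mutually
# exclusive base_addr/base_addrs keys, and any attached memory regions.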
def check_modules(top, prefix):
|
||||||
|
error = 0
|
||||||
|
for m in top['module']:
|
||||||
|
modname = m.get("name", "unnamed module")
|
||||||
|
error += check_keys(m, module_required, module_optional, module_added,
|
||||||
|
prefix + " " + modname)
|
||||||
|
|
||||||
|
# these fields are mutually exclusive
|
||||||
|
if 'base_addr' in m and 'base_addrs' in m:
|
||||||
|
log.error("{} {} a module cannot define both the 'base_addr' "
|
||||||
|
"and 'base_addrs' keys at the same time"
|
||||||
|
.format(prefix, modname))
|
||||||
|
error += 1
|
||||||
|
|
||||||
|
if 'base_addrs' in m and 'memory' in m:
|
||||||
|
for intf, value in m['memory'].items():
|
||||||
|
error += check_keys(value, memory_required,
|
||||||
|
memory_optional, memory_added,
|
||||||
|
prefix + " " + modname + " " + intf)
|
||||||
|
# make sure the memory regions correspond to the TL-UL interfaces
|
||||||
|
if intf not in m['base_addrs']:
|
||||||
|
log.error("{} {} memory region {} does not "
|
||||||
|
"correspond to any of the defined "
|
||||||
|
"TL-UL interfaces".format(prefix, modname, intf))
|
||||||
|
error += 1
|
||||||
|
# make sure the linker region access attribute is valid
|
||||||
|
attr = value.get('swaccess', 'unknown attribute')
|
||||||
|
if attr not in ['r', 'rw', 'rx', 'rwx']:
|
||||||
|
log.error('{} {} swaccess attribute {} of memory region {} '
|
||||||
|
'is not valid'.format(prefix, modname, attr, intf))
|
||||||
|
error += 1
|
||||||
|
return error
|
||||||
|
|
||||||
|
|
||||||
|
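# Top-level validation entry point: checks the top hjson against all of the
# rules above and returns the (possibly updated) top plus the error count.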
def validate_top(top, ipobjs, xbarobjs):
|
||||||
|
# Check the top-level keys first and return early if they are malformed
|
||||||
|
error = check_keys(top, top_required, top_optional, top_added, "top")
|
||||||
|
|
||||||
|
if error != 0:
|
||||||
|
log.error("Top HJSON has top level errors. Aborting")
|
||||||
|
return top, error
|
||||||
|
|
||||||
|
component = top['name']
|
||||||
|
|
||||||
|
# Check module instantiations
|
||||||
|
error += check_modules(top, component)
|
||||||
|
|
||||||
|
# MODULE check
|
||||||
|
err, ip_idxs = check_target(top, ipobjs, Target(TargetType.MODULE))
|
||||||
|
error += err
|
||||||
|
|
||||||
|
# XBAR check
|
||||||
|
err, xbar_idxs = check_target(top, xbarobjs, Target(TargetType.XBAR))
|
||||||
|
error += err
|
||||||
|
|
||||||
|
# MEMORY check
|
||||||
|
error += check_flash(top)
|
||||||
|
|
||||||
|
# Power domain check
|
||||||
|
error += check_power_domains(top)
|
||||||
|
|
||||||
|
# Clock / Reset check
|
||||||
|
error += check_clocks_resets(top, ipobjs, ip_idxs, xbarobjs, xbar_idxs)
|
||||||
|
|
||||||
|
# RV_PLIC check
|
||||||
|
|
||||||
|
# Pinout, pinmux and target checks
|
||||||
|
# Note that these checks must happen in this order, as
|
||||||
|
# the pinmux and target configs depend on the pinout.
|
||||||
|
error += check_pinout(top, component)
|
||||||
|
error += check_pinmux(top, component)
|
||||||
|
error += check_implementation_targets(top, component)
|
||||||
|
|
||||||
|
return top, error
|