2023-08-21 18:55:11 +00:00
|
|
|
###############################################################################
|
|
|
|
## Copyright (C) 2022-2023 Analog Devices, Inc. All rights reserved.
|
|
|
|
### SPDX short identifier: ADIBSD
|
|
|
|
###############################################################################
|
|
|
|
|
2023-08-01 20:39:09 +00:00
|
|
|
import os.path
|
2023-11-21 12:56:25 +00:00
|
|
|
import contextlib
|
|
|
|
import re
|
2023-08-16 12:57:14 +00:00
|
|
|
from docutils import nodes
|
|
|
|
from docutils.statemachine import ViewList
|
|
|
|
from docutils.parsers.rst import Directive, directives
|
|
|
|
from sphinx.util.nodes import nested_parse_with_titles
|
|
|
|
from sphinx.util import logging
|
2023-08-01 20:39:09 +00:00
|
|
|
from lxml import etree
|
2023-08-07 19:31:41 +00:00
|
|
|
from adi_hdl_static import hdl_strings
|
2023-12-04 22:57:35 +00:00
|
|
|
from adi_hdl_render import hdl_component
|
2023-08-07 19:31:41 +00:00
|
|
|
from uuid import uuid4
|
2023-10-06 12:23:50 +00:00
|
|
|
from hashlib import sha1
|
2023-08-16 12:57:14 +00:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2023-08-07 19:31:41 +00:00
|
|
|
dft_hide_collapsible_content = True
|
|
|
|
|
|
|
|
class node_base(nodes.Element, nodes.General):
    """
    Base class for raw-HTML helper nodes.

    Adapted from
    https://github.com/pradyunsg/sphinx-inline-tabs
    https://github.com/dgarcia360/sphinx-collapse
    """

    @staticmethod
    def visit(translator, node):
        # Copy the node attributes and strip the docutils-internal
        # bookkeeping entries so only real HTML attributes reach the tag.
        attributes = node.attributes.copy()

        attributes.pop("ids")
        attributes.pop("classes")
        attributes.pop("names")
        attributes.pop("dupnames")
        attributes.pop("backrefs")

        text = translator.starttag(node, node.tagname, **attributes)
        translator.body.append(text.strip())

    @staticmethod
    def depart(translator, node):
        # Only non-void elements get a closing tag.
        if node.endtag:
            translator.body.append(f"</{node.tagname}>")

    @staticmethod
    def default(translator, node):
        pass


class node_div(node_base):
    tagname = 'div'
    endtag = True


class node_input(node_base):
    tagname = 'input'
    # Fix: was the string 'false', which is truthy and made depart()
    # emit an invalid </input> closing tag for this void element.
    endtag = False


class node_label(node_base):
    tagname = 'label'
    endtag = True


class node_icon(node_base):
    tagname = 'div'
    # Fix: was the string 'false' (truthy); the icon div is emitted
    # without a closing tag by design.
    endtag = False
2023-08-16 12:57:14 +00:00
|
|
|
|
2023-08-01 20:39:09 +00:00
|
|
|
def dot_fix(string):
    """Append a trailing period unless the last character already is one."""
    missing_dot = string.rfind('.') != len(string) - 1
    return string + '.' if missing_dot else string
|
|
|
|
|
|
|
|
def pretty_dep(string):
    """Strip IP-XACT boilerplate from a dependency expression.

    Returns an empty string when no dependency is set.
    """
    if string is None:
        return ''
    cleaned = string.replace("'MODELPARAM_VALUE.", '')
    return cleaned.replace("'", '')
|
|
|
|
|
|
|
|
class directive_base(Directive):
    """Shared plumbing for the hdl-* directives.

    Provides description parsing, table-building helpers and the
    collapsible container used by the concrete directives below.
    """
    has_content = True
    add_index = True
    current_doc = ''
    final_argument_whitespace = True

    @staticmethod
    def get_descriptions(content):
        """Parse a ``* - name  description`` style body into a dict.

        Returns {name: description}, where each description is the
        joined continuation lines with the first list-item dash removed.
        """
        items = {}
        key = ''
        for line in content:
            if line.startswith('* -'):
                key = line[line.find('* -')+3:].split()[0]
                items[key] = []
            else:
                items[key].append(line)
        for key in items:
            items[key] = ' '.join(items[key]).replace('-', '', 1).strip()
        return items

    def _parse_rst(self, text):
        """Parse *text* as reST and return the resulting section node."""
        rst = ViewList()
        # Unique virtual filename keeps docutils diagnostics unambiguous.
        rst.append(text, f"virtual_{str(uuid4())}", 0)
        node = nodes.section()
        node.document = self.state.document
        nested_parse_with_titles(self.state, rst, node)
        return node

    def column_entry(self, row, text, node_type, classes=None):
        """Append one cell to *row*, rendered according to *node_type*.

        Fix: *classes* previously defaulted to a shared mutable list;
        use None as the sentinel instead.
        """
        if classes is None:
            classes = []
        entry = nodes.entry(classes=classes)
        if node_type == 'literal':
            entry += nodes.literal(text=text)
        elif node_type == 'paragraph':
            entry += nodes.paragraph(text=text)
        elif node_type == 'reST':
            entry += self._parse_rst(text)
        elif node_type == 'default_value':
            # Hex defaults are rendered verbatim; anything else may
            # contain reST markup and is parsed.
            if text[0:2] != '0x':
                entry += self._parse_rst(text)
            else:
                entry += nodes.literal(text=text)
        else:
            # Unknown node_type: append nothing.
            return
        row += entry

    def column_entries(self, rows, items):
        """Build one table row from [text, node_type(, classes)] items."""
        row = nodes.row()
        for item in items:
            if len(item) == 3:
                self.column_entry(row, item[0], item[1], classes=item[2])
            else:
                self.column_entry(row, item[0], item[1])
        rows.append(row)

    def generic_table(self, description):
        """Render a plain Name/Description table from *description*."""
        tgroup = nodes.tgroup(cols=2)
        for _ in range(2):
            colspec = nodes.colspec(colwidth=1)
            tgroup.append(colspec)
        table = nodes.table()
        table += tgroup

        self.table_header(tgroup, ["Name", "Description"])

        rows = []
        for key in description:
            row = nodes.row()
            entry = nodes.entry()
            entry += nodes.literal(text="{:s}".format(key))
            row += entry
            entry = nodes.entry()
            entry += self._parse_rst(description[key])
            row += entry
            rows.append(row)

        tbody = nodes.tbody()
        tbody.extend(rows)
        tgroup += tbody

        return table

    @staticmethod
    def table_header(tgroup, columns):
        """Append a header row with the given column titles."""
        thead = nodes.thead()
        tgroup += thead
        row = nodes.row()

        for header_name in columns:
            entry = nodes.entry()
            entry += nodes.paragraph(text=header_name)
            row += entry

        thead.append(row)

    def collapsible(self, section, text=""):
        """Add a checkbox-driven collapsible container to *section*.

        Returns (content, label) so the caller can fill in the body.
        """
        env = self.state.document.settings.env

        # Stable element id derived from the label text.
        _id = sha1(text.encode('utf-8')).hexdigest()
        container = nodes.container(
            "",
            is_div=True,
            classes=['collapsible']
        )
        checked = {"checked": ''} if not env.config.hide_collapsible_content else {}
        input_ = node_input(
            type="checkbox",
            **checked,
            ids=[_id],
            name=_id,
            classes=['collapsible_input']
        )
        label = node_label(
            **{"for": _id}
        )
        icon = node_icon(
            classes=['icon']
        )
        content = nodes.container(
            "",
            is_div=True,
            classes=['collapsible_content']
        )
        label += nodes.paragraph(text=text)
        label += icon

        container += input_
        container += label
        container += content

        section += container

        return (content, label)
|
|
|
|
|
2023-08-21 18:55:11 +00:00
|
|
|
class directive_collapsible(directive_base):
    """Wrap the directive body inside a collapsible container."""
    option_spec = {'path': directives.unchanged}
    required_arguments = 1
    optional_arguments = 0

    def run(self):
        self.assert_has_content()

        env = self.state.document.settings.env
        self.current_doc = env.doc2path(env.docname)

        outer = node_div()

        # The single required argument is the collapsible's label.
        body, _ = self.collapsible(outer, self.arguments[0].strip())
        self.state.nested_parse(self.content, self.content_offset, body)

        return [ outer ]
|
2023-08-07 19:31:41 +00:00
|
|
|
|
2023-08-01 20:39:09 +00:00
|
|
|
class directive_interfaces(directive_base):
    """Render the bus-interface and ports tables of a parsed IP-XACT."""
    option_spec = {'path': directives.unchanged}
    required_arguments = 0
    optional_arguments = 0

    def tables(self, subnode, content, component):
        """Append per-bus tables and the ports table to *subnode*."""
        description = self.get_descriptions(content)

        if component is None:
            # No component.xml parsed: fall back to a plain table.
            return self.generic_table(description)

        bs = component['bus_interface']
        for tag in bs:
            section = nodes.section(
                ids=[f"bus-interface-{tag}"]
            )
            title = nodes.title(text=tag)
            section += title

            if bs[tag]['dependency'] is not None:
                section += [nodes.inline(text="Enabled if "),
                            nodes.literal(text=pretty_dep(bs[tag]['dependency'])),
                            nodes.inline(text=".")]
            if tag in description:
                rst = ViewList()
                rst.append(description[tag], f"virtual_{str(uuid4())}", 0)
                node = nodes.section()
                node.document = self.state.document
                nested_parse_with_titles(self.state, rst, node)
                section += node

            collapse, _ = self.collapsible(section, f"Ports of {tag} bus.")

            tgroup = nodes.tgroup(cols=3)
            for _ in range(3):
                colspec = nodes.colspec(colwidth=1)
                tgroup.append(colspec)
            table = nodes.table()
            table += tgroup

            self.table_header(tgroup, ["Physical Port", "Logical Port", "Direction"])

            rows = []
            pm = bs[tag]['port_map']
            for key in pm:
                self.column_entries(rows, [
                    [key, 'literal'],
                    [pm[key]['logical_port'], 'literal'],
                    [pm[key]['direction'], 'paragraph'],
                ])

            tbody = nodes.tbody()
            tbody.extend(rows)
            tgroup += tbody
            collapse += table

            subnode += section

        section = nodes.section(ids=["ports"])
        title = nodes.title(text="Ports")
        section += title
        collapse, _ = self.collapsible(section, "Ports table.")

        tgroup = nodes.tgroup(cols=4)
        for _ in range(4):
            colspec = nodes.colspec(colwidth=1)
            tgroup.append(colspec)
        table = nodes.table()
        table += tgroup

        self.table_header(tgroup, ["Physical Port", "Direction", "Dependency", "Description"])

        rows = []
        pr = component['ports']
        dm = component['bus_domain']
        for key in pr:
            row = nodes.row()
            self.column_entry(row, key, 'literal')
            self.column_entry(row, pr[key]['direction'], 'paragraph')
            self.column_entry(row, pretty_dep(pr[key]['dependency']), 'paragraph')
            if 'clk' in key or 'clock' in key:
                domain = 'clock domain'
            elif 'reset' in key:
                # Fix: was `elif 'reset':`, which is always truthy and
                # mislabelled every non-clock port as a reset signal.
                domain = 'reset signal'
            else:
                domain = 'domain'
            if key in dm:
                bus = 'Buses' if len(dm[key]) > 1 else 'Bus'
                plr = 'are' if len(dm[key]) > 1 else 'is'
                in_domain = f"{bus} ``{'``, ``'.join(dm[key])}`` {plr} synchronous to this {domain}."
            else:
                in_domain = ""
            if key in description:
                self.column_entry(row, " ".join([description[key], in_domain]), 'reST', classes=['description'])
            else:
                self.column_entry(row, in_domain, 'reST', classes=['description'])
            rows.append(row)

        tbody = nodes.tbody()
        tbody.extend(rows)
        tgroup += tbody
        collapse += table

        subnode += section

        # Warn about directive entries with no IP-XACT counterpart.
        for tag in description:
            if tag not in bs and tag not in pr:
                logger.warning(f"Signal {tag} defined in the directive does not exist in the IP-XACT (component.xml)!")

        return subnode

    def run(self):
        env = self.state.document.settings.env
        self.current_doc = env.doc2path(env.docname)

        node = node_div()

        if 'path' in self.options:
            lib_name = self.options['path']
        else:
            lib_name = env.docname.replace('/index', '')

        if lib_name in env.component:
            self.tables(node, self.content, env.component[lib_name])
        else:
            self.tables(node, self.content, None)

        return [ node ]
|
|
|
|
|
|
|
|
class directive_regmap(directive_base):
    """Render a register-map table parsed from adi_regmap_*.txt files."""
    option_spec = {'name': directives.unchanged, 'no-type-info': directives.unchanged}
    required_arguments = 0
    optional_arguments = 0

    def tables(self, subnode, obj):
        """Append the register map (and optionally access types) to *subnode*."""
        section = nodes.section(ids=[f"register-map-{obj['title']}"])
        # NOTE(review): both f-string fields render obj['title']; the
        # second was probably meant to be the regmap key -- confirm
        # against the intended rendering before changing.
        title = nodes.title(text=f"{obj['title']} ({obj['title']})")

        section += title
        content, _ = self.collapsible(section, "Register map table.")
        tgroup = nodes.tgroup(cols=7)
        for _ in range(7):
            colspec = nodes.colspec(colwidth=1)
            tgroup.append(colspec)
        table = nodes.table(classes=['regmap'])
        table += tgroup

        self.table_header(tgroup, ["DWORD", "BYTE", "BITS", "Name", "Type", "Default Value", "Description"])

        rows = []
        for reg in obj['regmap']:
            # Register header row: BITS/Type/Default are left blank.
            self.column_entries(rows, [
                [reg['address'][0], 'literal'],
                [reg['address'][1], 'literal'],
                ['', 'literal'],
                [reg['name'], 'literal'],
                ['', 'literal'],
                ['', 'literal'],
                [reg['description'], 'reST', ['description']],
            ])

            # One row per bit field, with the address columns blank.
            for field in reg['fields']:
                self.column_entries(rows, [
                    ['', 'literal'],
                    ['', 'literal'],
                    [f"[{field['bits']}]", 'literal'],
                    [field['name'], 'literal'],
                    [field['rw'], 'literal'],
                    [field['default'], 'default_value', ['default']],
                    [field['description'], 'reST', ['description']],
                ])

        tbody = nodes.tbody()
        tbody.extend(rows)
        tgroup += tbody
        content += table

        subnode += section

        if 'no-type-info' in self.options:
            return subnode

        # Access-type legend table.
        tgroup = nodes.tgroup(cols=3)
        for _ in range(3):
            colspec = nodes.colspec(colwidth=1)
            tgroup.append(colspec)
        table = nodes.table()
        table += tgroup

        self.table_header(tgroup, ["Access Type", "Name", "Description"])

        rows = []
        for at in obj['access_type']:
            self.column_entries(rows, [
                [at, 'paragraph'],
                [hdl_strings.access_type[at]['name'], 'paragraph'],
                [hdl_strings.access_type[at]['description'], 'paragraph']
            ])

        tbody = nodes.tbody()
        tbody.extend(rows)
        tgroup += tbody
        section += table

        return subnode

    def run(self):
        env = self.state.document.settings.env
        self.current_doc = env.doc2path(env.docname)
        if os.getcwd() not in self.current_doc:
            raise Exception(f"Inconsistent paths, {os.getcwd()} not in {self.current_doc}")
        # Owning doc path relative to cwd, without the file extension.
        owner = self.current_doc[len(os.getcwd())+1:-4]

        node = node_div()

        if 'name' in self.options:
            lib_name = self.options['name']
        else:
            logger.warning("hdl-regmap directive without name option, skipped!")
            return [ node ]

        subnode = nodes.section(ids=["hdl-regmap"])

        # Have to search all because it is allowed to have more than one
        # regmap per file...
        file = None
        for f in env.regmaps:
            if lib_name in env.regmaps[f]['subregmap']:
                file = f
                break

        if file is None:
            logger.warning(f"Title tool {lib_name} not-found in any regmap file, skipped!")
            return [ node ]

        # Fix: index with the matched `file`, not the loop variable `f`,
        # which only coincidentally holds the same value after `break`.
        if owner not in env.regmaps[file]['owners']:
            env.regmaps[file]['owners'].append(owner)
        self.tables(subnode, env.regmaps[file]['subregmap'][lib_name])

        node += subnode

        return [ node ]
|
|
|
|
|
|
|
|
class directive_parameters(directive_base):
    """Render the parameters table of a parsed IP-XACT component."""
    option_spec = {'path': directives.unchanged}
    required_arguments = 0
    optional_arguments = 0

    def tables(self, content, parameter):
        """Build the 5-column parameters table (or a generic fallback)."""
        description = self.get_descriptions(content)

        if parameter is None:
            # No component.xml available: plain Name/Description table.
            return self.generic_table(description)

        tgroup = nodes.tgroup(cols=5)
        for _ in range(5):
            tgroup.append(nodes.colspec(colwidth=1))
        table = nodes.table()
        table += tgroup

        self.table_header(tgroup, ["Name", "Description", "Data Type", "Default Value", "Choices/Range"])

        rows = []
        for key in parameter:
            row = nodes.row()
            self.column_entry(row, "{:s}".format(key), 'literal')
            # A directive-provided description wins over the IP-XACT one.
            if key in description:
                self.column_entry(row, description[key], 'reST', classes=['description'])
            else:
                self.column_entry(row, dot_fix(parameter[key]['description']), 'paragraph', classes=['description'])
            for tag, ty in zip(['type', 'default'], ['paragraph', 'literal']):
                if parameter[key][tag] is not None:
                    self.column_entry(row, parameter[key][tag], ty, classes=[tag])
                else:
                    logger.warning(f"Got empty {tag} at parameter {key}!")
                    self.column_entry(row, "", 'paragraph')
            crange = []
            for tag in ['choices', 'range']:
                if parameter[key][tag] is not None:
                    crange.append(parameter[key][tag])
            self.column_entry(row, '. '.join(crange), 'paragraph')

            rows.append(row)

        tbody = nodes.tbody()
        tbody.extend(rows)
        tgroup += tbody

        # Warn about directive entries with no IP-XACT counterpart.
        for tag in description:
            if tag not in parameter:
                logger.warning(f"{tag} defined in the directive does not exist in the IP-XACT (component.xml)!")

        return table

    def run(self):
        env = self.state.document.settings.env
        self.current_doc = env.doc2path(env.docname)

        node = node_div()

        if 'path' not in self.options:
            self.options['path'] = env.docname.replace('/index', '')
        lib_name = self.options['path']

        subnode = nodes.section(ids=["hdl-parameters"])
        if lib_name in env.component:
            subnode += self.tables(self.content, env.component[lib_name]['parameters'])
        else:
            subnode += self.tables(self.content, None)

        node += subnode

        return [ node ]
|
|
|
|
|
2023-12-04 22:57:35 +00:00
|
|
|
class directive_component_diagram(directive_base):
    """Embed the component diagram SVG (or a placeholder) inline."""
    option_spec = {'path': directives.unchanged}
    required_arguments = 0
    optional_arguments = 0

    def missing_diagram(self):
        """Render a placeholder when no component.xml was parsed."""
        svg_raw = hdl_component.render_placeholder(self.options['path'])

        svg = nodes.raw('', svg_raw, format='html')
        return [ svg ]

    def diagram(self):
        """Load the pre-rendered SVG from the managed build directory."""
        name = hdl_component.get_name(self.options['path'])
        path = '_build/managed'
        # Fix: use a context manager so the file handle is closed
        # deterministically instead of being leaked until GC.
        with open(os.path.join(path, name)) as f:
            svg_raw = f.read()

        svg = nodes.raw('', svg_raw, format='html')
        return [ svg ]

    def run(self):
        env = self.state.document.settings.env
        self.current_doc = env.doc2path(env.docname)

        node = node_div()

        if 'path' not in self.options:
            self.options['path'] = env.docname.replace('/index', '')
        lib_name = self.options['path']

        subnode = nodes.section(ids=["hdl-component-diagram"])
        if lib_name in env.component:
            subnode += self.diagram()
        else:
            subnode += self.missing_diagram()

        node += subnode

        return [ node ]
|
|
|
|
|
|
|
|
|
2023-08-01 20:39:09 +00:00
|
|
|
def parse_hdl_component(path, ctime):
    """Parse an IP-XACT component.xml into a plain dict.

    Returns a dict with keys bus_interface, bus_domain, ports,
    parameters and the file ctime used for cache invalidation.
    """
    component = {
        'bus_interface': {},
        'bus_domain': {},
        'ports': {},
        'parameters': {},
        'ctime': ctime
    }

    def get_namespaces(item):
        # All three namespaces are mandatory for a valid component.xml.
        nsmap = item.nsmap
        for i in ['spirit', 'xilinx', 'xsi']:
            if i not in nsmap:
                raise Exception(f"Required namespace {i} not in file!")

        return (nsmap['spirit'], nsmap['xilinx'], nsmap['xsi'])

    def get(item, local_name):
        # First matching element, or None.
        items = get_all(item, local_name)
        if len(items) == 0:
            return None
        else:
            return items[0]

    def get_all(item, local_name):
        # Namespace-agnostic XPath lookup of a '/'-separated path.
        template = "/*[local-name()='%s']"
        if not isinstance(local_name, str):
            raise Exception("Got wrong type, only Strings are allowed")
        local_name = local_name.split('/')
        return item.xpath('.' + ''.join([template % ln for ln in local_name]))

    def sattrib(item, attr):
        nonlocal spirit
        return item.get(f"{{{spirit}}}{attr}")

    def xattrib(item, attr):
        nonlocal xilinx
        return item.get(f"{{{xilinx}}}{attr}")

    def stag(item):
        nonlocal spirit
        return item.tag.replace(f"{{{spirit}}}",'')

    def xtag(item):
        # Currently unused here; kept for symmetry with stag().
        nonlocal xilinx
        return item.tag.replace(f"{{{xilinx}}}",'')

    def clean_dependency(string):
        return string[string.find("'"): string.rfind(')')].replace(')','')

    def get_dependency(item, type_=None):
        if type_ is None:
            type_ = stag(item)

        dependency = get(item, f"vendorExtensions/{type_}Info/enablement/isEnabled")
        if dependency is None:
            return None
        else:
            return clean_dependency(xattrib(dependency, 'dependency'))

    def get_range(item):
        min_ = sattrib(item, 'minimum')
        max_ = sattrib(item, 'maximum')
        # Fix: compare against None with `is`, not `==`.
        if max_ is None or min_ is None:
            return None
        else:
            return f"From {min_} to {max_}."

    def get_choice_type(name):
        return name[name.find('_')+1:name.rfind('_')]

    def format_default_value(value, fmt):
        # Normalize Vivado-style defaults to Verilog-ish literals.
        if fmt == "bitString" and value[0:2].lower() == "0x":
            return f"'h{value[2:].upper()}"
        if fmt == "bitString" and value[0] == '"' and value[-1] == '"':
            return f"'b{value[1:-1]}"
        if fmt == "bool":
            return value.title()
        return value

    root = etree.parse(path).getroot()
    spirit, xilinx, _ = get_namespaces(root)
    vendor = get(root, 'vendor').text  # also validates the element exists
    name = get(root, 'name').text

    bs = component['bus_interface']
    dm = component['bus_domain']
    for bus_interface in get_all(root, 'busInterfaces/busInterface'):
        bus_name = get(bus_interface, 'name').text
        # *_signal_clock / *_signal_reset pseudo-buses only record the
        # clock/reset domain of a physical signal; they are not buses.
        if '_signal_clock' in bus_name:
            signal_name = get(get(bus_interface, 'portMaps/portMap'), 'physicalPort/name').text
            if signal_name not in dm:
                dm[signal_name] = []
            dm[signal_name].append(bus_name[0:bus_name.find('_signal_clock')])
            continue
        if '_signal_reset' in bus_name:
            signal_name = get(get(bus_interface, 'portMaps/portMap'), 'physicalPort/name').text
            if signal_name not in dm:
                dm[signal_name] = []
            dm[signal_name].append(bus_name[0:bus_name.find('_signal_reset')])
            continue

        if get(bus_interface, 'slave') is not None:
            bus_role = 'slave'
        elif get(bus_interface, 'master') is not None:
            bus_role = 'master'
        else:
            bus_role = None

        bs[bus_name] = {
            'name': sattrib(get(bus_interface, 'busType'), 'name'),
            'role': bus_role,
            'dependency': get_dependency(bus_interface, 'busInterface'),
            'port_map': {}
        }

        pm = bs[bus_name]['port_map']
        for port_map in get_all(bus_interface, 'portMaps/portMap'):
            pm[get(port_map, 'physicalPort/name').text] = {
                'logical_port': get(port_map, 'logicalPort/name').text,
                # Filled in below from the model/ports section.
                'direction': ''
            }

    lport = component['ports']
    for port in get_all(root, 'model/ports/port'):
        port_name = get(port, 'name').text
        port_direction = get(port, 'wire/direction').text

        found = False
        for bus in bs:
            if port_name in bs[bus]['port_map']:
                found = True
                bs[bus]['port_map'][port_name]['direction'] = port_direction
                break

        # Fix: idiomatic truthiness test instead of `== False`.
        if not found:
            lport[port_name] = {
                'direction': port_direction,
                'dependency': get_dependency(port, 'port')
            }
    pr = component['parameters']
    for parameter in get_all(root, 'parameters/parameter'):
        param_description = get(parameter, 'displayName')
        # Parameters without a displayName are internal; skip them.
        if param_description is not None:
            param_name = get(parameter, 'name').text
            param_value = get(parameter, 'value')
            param_format = sattrib(param_value, 'format')
            pr[param_name] = {
                'description': param_description.text,
                'default': format_default_value(param_value.text, param_format),
                'type': param_format,
                '_choice_ref': sattrib(param_value, 'choiceRef'),
                'choices': None,
                'range': get_range(param_value)
            }

    # Refine the type column using the HDL model parameters.
    for parameter in get_all(root, 'model/modelParameters/modelParameter'):
        param_name = get(parameter, 'name').text
        param_type = sattrib(parameter, 'dataType')
        if param_type == "std_logic_vector":
            param_type = "logic vector"
        if param_type is not None and param_name in pr:
            if pr[param_name]['type'] is None:
                pr[param_name]['type'] = param_type.capitalize()
            else:
                param_format = pr[param_name]['type']
                pr[param_name]['type'] = param_format[0].upper()+param_format[1:]

    # Resolve choiceRef references into readable choice strings.
    for choice in get_all(root, 'choices/choice'):
        name = get(choice, 'name').text
        for key in pr:
            if pr[key]['_choice_ref'] == name:
                type_ = get_choice_type(name)
                values = get_all(choice, 'enumeration')
                string = []
                if type_ == 'pairs':
                    for v in values:
                        string.append(f"{sattrib(v, 'text')} ({v.text})")
                elif type_ == 'list':
                    for v in values:
                        string.append(v.text)
                else:
                    break
                pr[key]['choices'] = ', '.join(string)
                break

    return component
|
|
|
|
|
2023-08-07 19:31:41 +00:00
|
|
|
def manage_hdl_components(env, docnames, libraries):
    """Parse and render the IP-XACT component of each HDL library.

    Results are cached in ``env.component`` keyed by library path; a
    component is re-parsed only when its ``component.xml`` on disk is
    newer than the cached copy.

    Parameters
    ----------
    env : Sphinx build environment (cache lives in ``env.component``).
    docnames : list of docnames scheduled for (re-)reading; docs whose
        component was (re-)parsed are appended to it.
    libraries : iterable of ``[library_path, docname]`` pairs.
    """
    if not hasattr(env, 'component'):
        env.component = {}

    cp = env.component
    # Drop cache entries whose component.xml no longer exists on disk.
    for lib in list(cp):
        f = f"../{lib}/component.xml"
        if not os.path.isfile(f):
            del cp[lib]

    for lib, doc in libraries:
        f = f"../{lib}/component.xml"
        if not os.path.isfile(f):
            continue

        ctime = os.path.getctime(f)
        # Original used an empty "pass" branch for the up-to-date case;
        # condition inverted so only the real work remains.
        if lib not in cp or cp[lib]['ctime'] < ctime:
            cp[lib] = parse_hdl_component(f, ctime)
            hdl_component.render(env, lib, cp[lib])
            docnames.append(doc)
|
|
|
|
|
2023-08-07 19:31:41 +00:00
|
|
|
# From https://github.com/tfcollins/vger/blob/main/vger/hdl_reg_map.py
def parse_hdl_regmap(reg, ctime):
    """Parse ``regmap/adi_regmap_<reg>.txt`` into a dictionary.

    The file is a line-oriented format delimited by TITLE/ENDTITLE,
    REG/ENDREG and FIELD/ENDFIELD marker lines.

    Parameters
    ----------
    reg : regmap file name suffix (e.g. ``"dmac"``).
    ctime : file change time, stored in the result so callers can decide
        when to re-parse.

    Returns
    -------
    dict with keys ``subregmap`` (one entry per TITLE section),
    ``owners`` (docnames embedding this regmap, filled by callers) and
    ``ctime``.
    """
    regmap = {
        'subregmap': {},
        'owners': [],
        'ctime': ctime
    }

    with open(f"regmap/adi_regmap_{reg}.txt", "r") as f:
        data = f.readlines()
    data = [d.replace("\n", "") for d in data]

    while "TITLE" in data:
        # Get title
        tit = data.index("TITLE")

        title = str(data[tit + 1].strip())
        title_tool = str(data[tit + 2].strip())
        data = data[tit + 2:]

        if 'ENDTITLE' in [title_tool, title]:
            logger.warning(f"Malformed title fields at file regmap/adi_regmap_{reg}.txt, skipped!")
            continue

        regmap['subregmap'][title_tool] = {
            'title': title,
            'regmap': [],
            'access_type': []
        }

        # Get registers
        access_type = []
        while "REG" in data:
            regi = data.index("REG")
            rfi = data.index("ENDREG")

            if not regi:
                break

            reg_addr = data[regi + 1].strip()
            reg_name = data[regi + 2].strip()
            # Description is every line between the name and ENDREG.
            reg_desc = [data[i].strip() for i in range(regi + 3, rfi)]
            reg_desc = " ".join(reg_desc)

            with contextlib.suppress(ValueError):
                # NOTE(review): the walrus value is falsy when TITLE sits at
                # index 0, which skips this boundary check — confirm intended.
                if tet := data.index("TITLE"):
                    if regi > tet:
                        # into next regmap
                        break

            data = data[regi + 1:]

            # Get fields
            fields = []
            while "FIELD" in data:
                fi = data.index("FIELD")
                efi = data.index("ENDFIELD")

                if not fi:
                    break

                with contextlib.suppress(ValueError):
                    if rege := data.index("REG"):
                        if fi > rege:
                            # into next register
                            break

                # First FIELD line: "[bits] default..." — brackets stripped
                # from the bit range, remainder joined as the default value.
                field_loc = data[fi + 1].strip()
                field_loc = field_loc.split(" ")
                field_bits = field_loc[0].replace("[", "").replace("]", "")
                field_default = ' '.join(field_loc[1:]) if len(field_loc) > 1 else "NA"

                field_name = data[fi + 2].strip()
                field_rw = data[fi + 3].strip()

                # Normalize shorthand access types.
                if field_rw == 'R':
                    field_rw = 'RO'
                elif field_rw == 'W':
                    field_rw = 'WO'

                if '-V' in field_rw:
                    # Volatile suffix is tracked as its own legend entry.
                    if 'V' not in access_type:
                        access_type.append('V')

                field_rw_ = field_rw.replace('-V', '')
                if field_rw_ not in access_type:
                    if field_rw_ not in hdl_strings.access_type:
                        logger.warning(f"Malformed access type {field_rw} for register {field_name}, file regmap/adi_regmap_{reg}.txt.")
                    else:
                        # NOTE(review): appends field_rw (possibly still
                        # carrying "-V") rather than field_rw_ — confirm.
                        access_type.append(field_rw)

                field_desc = [data[i].strip() for i in range(fi + 4, efi)]
                field_desc = " ".join(field_desc)

                # TODO Remove dokuwiki scaping support
                # Temporary dokuwiki scaping convert to not break current dokuwiki tables
                field_default = field_default.replace("''", "``")
                field_desc = field_desc.replace("''", "``")

                fields.append(
                    {
                        "name": field_name,
                        "bits": field_bits,
                        "default": field_default,
                        "rw": field_rw,
                        "description": field_desc,
                    }
                )

                data = data[fi + 1:]

            # Resolve the register address; "ADDR + INCR*n" denotes an
            # array of registers at a fixed stride.  Byte address is the
            # dword address shifted by 2 (4-byte registers).
            try:
                if '+' in reg_addr:
                    reg_addr_ = reg_addr.split('+')
                    reg_addr_[0] = int(reg_addr_[0], 16)
                    reg_addr_[1] = int(reg_addr_[1].replace('*n', ''), 16)
                    reg_addr_dword = f"{hex(reg_addr_[0])} + {hex(reg_addr_[1])}*n"
                    reg_addr_byte = f"{hex(reg_addr_[0] << 2)} + {hex(reg_addr_[1] << 2)}*n"
                else:
                    reg_addr_ = int(reg_addr, 16)
                    reg_addr_dword = f"{hex(reg_addr_)}"
                    reg_addr_byte = f"{hex(reg_addr_ << 2)}"
            # Bugfix: was a bare "except:"; only int() / indexing can raise
            # here, so catch those instead of swallowing everything.
            except (ValueError, IndexError):
                logger.warning(f"Got malformed register address {reg_addr} for register {reg_name}, file regmap/adi_regmap_{reg}.txt.")
                reg_addr_dword = ""
                reg_addr_byte = ""

            regmap['subregmap'][title_tool]['regmap'].append(
                {
                    'name': reg_name,
                    'address': [reg_addr_dword, reg_addr_byte],
                    'description': reg_desc,
                    'fields': fields
                }
            )

        regmap['subregmap'][title_tool]['access_type'] = access_type

    return regmap
|
|
|
|
|
|
|
|
def manage_hdl_regmaps(env, docnames):
    """Refresh the register-map cache in ``env.regmaps``.

    Walks the ``regmap`` directory for ``adi_regmap_*.txt`` files,
    re-parses the out-of-date ones and schedules their owner docs for
    re-reading.

    Parameters
    ----------
    env : Sphinx build environment (cache lives in ``env.regmaps``).
    docnames : list of docnames scheduled for (re-)reading; owners of
        stale regmaps are appended to it.
    """
    if not hasattr(env, 'regmaps'):
        env.regmaps = {}

    rm = env.regmaps
    # Drop cache entries whose source file vanished from the tree.
    for lib in list(rm):
        f = f"regmap/adi_regmap_{lib}.txt"
        if not os.path.isfile(f):
            del rm[lib]

    # Inconsistent naming convention, need to parse all in directory.
    regmaps = []
    for (dirpath, dirnames, filenames) in os.walk("regmap"):
        for file in filenames:
            # Raw string: "\w"/"\." in a plain literal raise an
            # invalid-escape-sequence warning on Python 3.12+.
            m = re.search(r"adi_regmap_(\w+)\.txt", file)
            if m is None:
                continue

            reg_name = m.group(1)
            # Bugfix: was regmaps.extend(reg_name), which added the name
            # one character at a time.
            regmaps.append(reg_name)

            ctime = os.path.getctime(f"regmap/{file}")
            if reg_name in rm and rm[reg_name]['ctime'] < ctime:
                # Stale entry: every doc embedding this regmap must be
                # re-read to pick up the new content.
                for o in rm[reg_name]['owners']:
                    if o not in docnames:
                        docnames.append(o)
            # Original used an empty "pass" branch for the up-to-date
            # case; condition inverted so only the real work remains.
            if reg_name not in rm or rm[reg_name]['ctime'] < ctime:
                rm[reg_name] = parse_hdl_regmap(reg_name, ctime)
|
2023-08-07 19:31:41 +00:00
|
|
|
|
|
|
|
def manage_hdl_artifacts(app, env, docnames):
    """Keep cached HDL components and register maps up-to-date.

    Connected to ``env-before-read-docs``; docnames embedding stale
    artifacts are appended so Sphinx re-reads them.
    """
    libraries = []
    for docname in env.found_docs:
        if docname.startswith('library/'):
            libraries.append([docname.replace('/index', ''), docname])

    manage_hdl_components(env, docnames, libraries)
    manage_hdl_regmaps(env, docnames)
|
|
|
|
|
2023-08-16 12:57:14 +00:00
|
|
|
def setup(app):
    """Sphinx entry point: register directives, nodes, handlers, config."""
    hdl_directives = {
        'collapsible': directive_collapsible,
        'hdl-parameters': directive_parameters,
        'hdl-component-diagram': directive_component_diagram,
        'hdl-interfaces': directive_interfaces,
        'hdl-regmap': directive_regmap,
    }
    for name, klass in hdl_directives.items():
        app.add_directive(name, klass)

    # Each custom node renders through the same visit/depart pair for
    # every builder.
    for node in (node_div, node_input, node_label, node_icon):
        handlers = (node.visit, node.depart)
        app.add_node(node, html=handlers, latex=handlers, text=handlers)

    app.connect('env-before-read-docs', manage_hdl_artifacts)

    app.add_config_value('hide_collapsible_content', dft_hide_collapsible_content, 'env')

    return {
        'version': '0.1',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
|