docs: General improvements
Import aiohttp and asyncio only when needed. Better warnings for unknown
signals and parameters. Use pattern matching in regmap parsing. Fix up the
bundle count. Add a clarification about lists to the guidelines. Enforce
#1229 rules. Clean up the Makefile. Use a non-breaking hyphen.

Signed-off-by: Jorge Marques <jorge.marques@analog.com>
parent 39b2a2b8bb
commit 9f4d5ff71f
@@ -5,72 +5,9 @@ SOURCEDIR = .
BUILDDIR = _build

help:
	@echo "For help about the container instance, do \`make container-help\`"
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

#------------------------------------------------------------------------------
# Container commands
#------------------------------------------------------------------------------

CONTAINER_IMG ?=hdl-docs
CONTAINER_ENGINE =podman
CONTAINER_REPO =${CONTAINER_IMG}
CONTAINER_IMAGE =${CONTAINER_IMG}_build
CONTAINER_VERSION ?=0.1
CONTAINER_SHELL ?=bash -l
CONTAINER_RUN_EXTRA ?=
CONTAINER_FORMAT =$(if $(filter podman,${CONTAINER_ENGINE}),--format docker,)

IF_CONTAINER_RUNS=$(shell ${CONTAINER_ENGINE} container inspect -f '{{.State.Running}}' ${CONTAINER_IMAGE} 2>/dev/null)

CONTAINER_RUN_PARAMS = -it --rm --name=${CONTAINER_IMG} \
	--name=${CONTAINER_IMAGE} \
	--workdir=/${CONTAINER_IMG} \
	--mount type=bind,source=${CURDIR}/../,target=/${CONTAINER_IMG} \
	${CONTAINER_RUN_EXTRA} \
	${CONTAINER_REPO}/${CONTAINER_IMAGE}:${CONTAINER_VERSION}

container-login:
	@if [ "${IF_CONTAINER_RUNS}" != "true" ]; then \
		${CONTAINER_ENGINE} run ${CONTAINER_RUN_PARAMS} \
		${CONTAINER_SHELL}; \
	else \
		${CONTAINER_ENGINE} exec -it ${CONTAINER_IMAGE} \
		${CONTAINER_SHELL}; \
	fi

container-build:
	@printf "Building container image\n"
	@${CONTAINER_ENGINE} build --no-cache=true ${CONTAINER_FORMAT} -t ${CONTAINER_REPO}/${CONTAINER_IMAGE}:${CONTAINER_VERSION} -f ./Containerfile .

container-html:
	@${CONTAINER_ENGINE} run ${CONTAINER_RUN_PARAMS} ${CONTAINER_SHELL} -c \
		"source /.venv/bin/activate ; cd docs ; make html"

container-pdf:
	@${CONTAINER_ENGINE} run ${CONTAINER_RUN_PARAMS} ${CONTAINER_SHELL} -c \
		"source /.venv/bin/activate ; cd docs ; make latexpdf"

container-clean:
	@${CONTAINER_ENGINE} run ${CONTAINER_RUN_PARAMS} ${CONTAINER_SHELL} -c \
		"source /.venv/bin/activate ; cd docs ; make clean"

.PHONY: container-build container-login container-html container-pdf container-clean

container-help:
	@printf "The container commands allow to use a container to build the documentation.\n"
	@printf "Both podman and docker are supported, change the CONTAINER_ENGINE variable to select which to use.\n\n"
	@printf "Usage: make [options]\n"
	@printf "\
	Options:\n\
	container-build      Build the container image with dependencies (do once).\n\
	container-login      Access the container shell.\n\
	container-html       Build html documentation.\n\
	container-latexpdf   Build pdf documentation.\n\
	\n"

@@ -4,6 +4,8 @@
###############################################################################

import os.path
import contextlib
import re
from docutils import nodes
from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive, directives

@@ -13,7 +15,6 @@ from lxml import etree
from adi_hdl_static import hdl_strings
from uuid import uuid4
from hashlib import sha1
import contextlib

logger = logging.getLogger(__name__)

@@ -340,7 +341,7 @@ class directive_interfaces(directive_base):

        for tag in description:
            if tag not in bs and tag not in pr:
                logger.warning(f"Signal {tag} defined in the directive does not exist in the source code!")
                logger.warning(f"Signal {tag} defined in the directive does not exist in the IP-XACT (component.xml)!")

        return subnode

@@ -523,7 +524,7 @@ class directive_parameters(directive_base):

        for tag in description:
            if tag not in parameter:
                logger.warning(f"{tag} defined in the directive does not exist in the source code!")
                logger.warning(f"{tag} defined in the directive does not exist in the IP-XACT (component.xml)!")

        return table

@@ -893,13 +894,17 @@ def manage_hdl_regmaps(env, docnames):
        if not os.path.isfile(f):
            del rm[lib]
    # Inconsistent naming convention, need to parse all in directory.
    files = []
    regmaps = []
    for (dirpath, dirnames, filenames) in os.walk("regmap"):
        files.extend(filenames)
        break
    regmaps = [f.replace('adi_regmap_','').replace('.txt','') for f in files]
    for reg_name in regmaps:
        ctime = os.path.getctime(f"regmap/adi_regmap_{reg_name}.txt")
    for file in filenames:
        m = re.search("adi_regmap_(\w+)\.txt", file)
        if not bool(m):
            continue

        reg_name = m.group(1)
        regmaps.extend(reg_name)

        ctime = os.path.getctime(f"regmap/{file}")
        if reg_name in rm and rm[reg_name]['ctime'] < ctime:
            for o in rm[reg_name]['owners']:
                if o not in docnames:
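As a side note on the regmap hunk above, the sketch below shows the same
filename-matching idea in isolation. The ``regmap/`` directory and the
``adi_regmap_<name>.txt`` naming convention are taken from the diff; the helper
name ``scan_regmaps`` and the raw-string regex are assumptions added for this
example, not code from the extension itself.

.. code-block:: python

   import os
   import re

   def scan_regmaps(path="regmap"):
       """Map register-map names to the ctime of their adi_regmap_*.txt file."""
       found = {}
       for _, _, filenames in os.walk(path):
           for file in filenames:
               m = re.search(r"adi_regmap_(\w+)\.txt", file)
               if m is None:
                   continue
               # m.group(1) is the register map name, e.g. "adc" for adi_regmap_adc.txt
               found[m.group(1)] = os.path.getctime(os.path.join(path, file))
           break  # only the top level of the directory is scanned
       return found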
@@ -3,11 +3,9 @@
### SPDX short identifier: ADIBSD
###############################################################################

import subprocess
from docutils import nodes
from sphinx.util import logging
import subprocess
import asyncio
import aiohttp

logger = logging.getLogger(__name__)
validate_links_user_agent = 'Status resolver (Python/Sphinx)'

@@ -151,6 +149,10 @@ def validate_links(app, env):
        logger.info(f"Skipping {len(env.links)} URLs checks-ups. Set validate_links to True to enable this.")
        return

    global asyncio, aiohttp
    import asyncio
    import aiohttp

    asyncio.run(
        async_validate_links(app, env)
    )
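The hunk above defers the aiohttp/asyncio imports until link validation is
actually requested, and the ``global`` statement rebinds them at module scope so
other helpers can use them afterwards. A minimal standalone sketch of that
pattern is shown below; it assumes aiohttp is installed, and ``validate`` and
``check`` are hypothetical names, not functions from this extension.

.. code-block:: python

   def validate(enabled: bool):
       """Only pull in the async stack when validation is requested."""
       if not enabled:
           return None  # aiohttp/asyncio are never imported on this path

       # Bind the imports at module level so other helpers can also see them.
       global asyncio, aiohttp
       import asyncio
       import aiohttp

       async def check(url: str) -> int:
           # Hypothetical stand-in for the real link-checking coroutine.
           async with aiohttp.ClientSession() as session:
               async with session.head(url) as resp:
                   return resp.status

       return asyncio.run(check("https://example.com"))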
@@ -177,7 +179,7 @@ async def async_validate_links(app, env):
    step = 25

    links = list(env.links)
    leng = total%step+2 if total%step != 0 else total%step+1
    leng = int(total/step)+1 if total%step != 0 else int(total/step)
    for i in range(0, leng):
        cur = i*step
        end = total if (i+1)*step > total else (i+1)*step
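The corrected ``leng`` expression above is just a ceiling division: the number
of ``step``-sized batches needed to cover every link. A small illustrative
check (the totals below are made-up example values):

.. code-block:: python

   import math

   step = 25

   # 60 links: 60 % 25 != 0, so int(60/25) + 1 = 3 batches (25 + 25 + 10 links).
   # 50 links: 50 % 25 == 0, so int(50/25) = 2 batches exactly.
   for total in (60, 50):
       leng = int(total / step) + 1 if total % step != 0 else int(total / step)
       assert leng == math.ceil(total / step)
       batches = [(i * step, min((i + 1) * step, total)) for i in range(leng)]
       print(total, leng, batches)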
@@ -591,7 +591,7 @@ the following order:

   * - Size
     - Name
     - Description
   * - 32-bit
   * - 32‑bit
     - flags
     - | This field includes 2 control bits:

@@ -600,25 +600,25 @@ the following order:
       next DMA descriptor pointed to by ``next_sg_addr`` will be loaded.
       * bit1: if set, an end-of-transfer interrupt will be raised after the
       memory segment pointed to by this descriptor has been transferred.
   * - 32-bit
   * - 32‑bit
     - id
     - This field corresponds to an identifier of the descriptor.
   * - 64-bit
   * - 64‑bit
     - dest_addr
     - This field contains the destination address of the transfer.
   * - 64-bit
   * - 64‑bit
     - src_addr
     - This field contains the source address of the transfer.
   * - 64-bit
   * - 64‑bit
     - next_sg_addr
     - This field contains the address of the next descriptor.
   * - 32-bit
   * - 32‑bit
     - y_len
     - This field contains the number of rows to transfer, minus one.
   * - 32-bit
   * - 32‑bit
     - x_len
     - This field contains the number of bytes to transfer, minus one.
   * - 32-bit
   * - 32‑bit
     - src_stride
     - This field contains the number of bytes between the start of one row and
       the next row for the source address.
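To make the descriptor layout in the hunks above easier to picture, here is a
rough sketch of the listed fields as a packed structure. It covers only the
fields visible in this diff context (the table in the source document continues
past this point), and the structure name and back-to-back packing are
illustrative assumptions, not the driver's actual definition.

.. code-block:: python

   import ctypes

   class SGDescriptor(ctypes.LittleEndianStructure):
       """Illustrative only: one scatter-gather descriptor, fields as listed above."""
       _pack_ = 1
       _fields_ = [
           ("flags",        ctypes.c_uint32),  # bit0: load next descriptor, bit1: end-of-transfer IRQ
           ("id",           ctypes.c_uint32),  # identifier of the descriptor
           ("dest_addr",    ctypes.c_uint64),  # destination address of the transfer
           ("src_addr",     ctypes.c_uint64),  # source address of the transfer
           ("next_sg_addr", ctypes.c_uint64),  # address of the next descriptor
           ("y_len",        ctypes.c_uint32),  # number of rows to transfer, minus one
           ("x_len",        ctypes.c_uint32),  # number of bytes to transfer, minus one
           ("src_stride",   ctypes.c_uint32),  # bytes between consecutive source rows
       ]

   # 4 + 4 + 8 + 8 + 8 + 4 + 4 + 4 = 44 bytes for the fields shown here.
   assert ctypes.sizeof(SGDescriptor) == 44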
@@ -87,19 +87,22 @@ Block design
Block diagram
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

\**\* MUST HAVE \**\* The data path and clock domains are depicted in
the below diagram:

\*\* TIP: upload the .svg file for the diagram to have high quality \*\*

If the project has multiple ways of configuration, then make subsections to
this section and show the default configuration and some other popular modes.

\**\* KEEP THIS PARAGRAPH \**\*
The data path and clock domains are depicted in the below diagram:

.. image:: ../ad9783_ebz/ad9783_zcu102_block_diagram.svg
   :width: 800
   :align: center
   :alt: AD9783-EBZ/ZCU102 block diagram

\*\* MUST: Use SVG format for the diagram \*\*

\*\* TIP: Block diagrams should contain subtitles only if there are at least two
different diagrams \*\*

Configuration modes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -146,8 +149,7 @@ Clock scheme
- SYSREF clocks are LVDS
- ADCCLK and REFCLK are LVPECL

\*\* ADD IMAGE IF APPLIES! TIP: upload the .svg file for the diagram to have
high quality \*\*
\*\* ADD IMAGE IF APPLIES! MUST: Use SVG format \*\*

**\*DESCRIBE OTHER COMPONENTS FROM THE PROJECT, EX: SYNCHRONA**\ \*

@@ -185,6 +187,7 @@ chains are merged together and transferred to the DDR with a single DMA.
CPU/Memory interconnects addresses
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

\**\* KEEP THIS PARAGRAPH \**\*
The addresses are dependent on the architecture of the FPGA, having an offset
added to the base address from HDL (see more at :ref:`architecture`).

@@ -197,6 +200,8 @@ some IPs are instatiated and some are not.

Check-out the table below to find out the conditions.

\*\* MUST: Hexadecimal addresses are written in caps and separated by an underscore. \*\*

==================== ================================= =============== =========== ============
Instance             Depends on parameter              Zynq/Microblaze ZynqMP      Versal
==================== ================================= =============== =========== ============
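As an aside on the address-formatting rule in the hunk above (hexadecimal in
caps, digits separated by an underscore), that style can be produced directly
in Python; the address below is a made-up example value, not one from the
table.

.. code-block:: python

   # Illustration only: render a base address in the "caps + underscore" style.
   base_addr = 0x44A3_0000
   print(f"0x{base_addr:_X}")  # prints: 0x44A3_0000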
@@ -293,6 +298,9 @@ GPIOs
      - 117:86
      - 141:110

\*\* MUST: GPIOs should be listed in descending order and should have the number
of bits specified next to their name \*\*

Interrupts
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1,4 +1,5 @@
sphinx
regex
lxml
furo
wavedrom
@@ -60,9 +60,6 @@ table.regmap {
table.regmap .caption-text{
  font-size: 1rem;
}
td {
  min-width: 4em;
}
th, td.type {
  white-space: nowrap;
}
@@ -128,6 +128,32 @@ an option, you shall remove it and export in the *grid* table format.
Now you only have to adjust the widths and give the final touches, like using
the correct directives and roles.

Lists
--------------------------------------------------------------------------------

Unordered lists use ``*`` or ``-``, and ordered lists use ``#.``.

Child items must be aligned with the first letter of the parent item; that
means 2 spaces for unordered lists and 3 spaces for ordered lists, for example:

.. code-block:: rst

   #. Parent ordered item.

      * Child unordered item.

        #. Child ordered item.
        #. Child ordered item.

Renders as:

#. Parent numbered item.

   * Child unordered item.

     #. Child ordered item.
     #. Child ordered item.

Code
--------------------------------------------------------------------------------