doc: Remove duplicated documentation directory
Commit ad7061ed74 ("doc: Move device tree bindings documentation to
doc/device-tree-bindings") moved all device tree binding documentation
to the doc/device-tree-bindings directory.
The current U-Boot project still has two documentation directories:
- doc/
- Documentation/
Move all documentation and Sphinx files to the doc/ directory so that all
content can be in a common place.
Signed-off-by: Breno Lima <breno.lima@nxp.com>
Committed by: Tom Rini
Parent: 894e235f14
Commit: 656d8da9d2
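These extensions only take effect once the documentation's Sphinx configuration loads them. As a rough orientation, a conf.py that uses the files added below might look like the following minimal sketch; the extension list, paths and kerneldoc_* values here are illustrative assumptions, not a copy of U-Boot's actual doc/conf.py:

import os
import sys

# Make the in-tree extensions under doc/sphinx/ importable by Sphinx.
# (Sphinx executes conf.py with the configuration directory as CWD.)
sys.path.insert(0, os.path.abspath('sphinx'))

extensions = [
    'kerneldoc',       # .. kernel-doc:: directive, drives the kernel-doc script
    'rstFlatTable',    # .. flat-table:: directive plus cspan/rspan roles
    'kernel_include',  # .. kernel-include:: with environment-variable expansion
    'cdomain',         # patched C domain (:name: option, function-like macros)
    'kfigure',         # .. kernel-figure:: / kernel-image:: / kernel-render::
]

# Settings consumed by kerneldoc.py's setup(); the values are illustrative.
kerneldoc_bin = '../scripts/kernel-doc'
kerneldoc_srctree = '..'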
doc/sphinx/cdomain.py (new file, 165 lines)
@@ -0,0 +1,165 @@
# -*- coding: utf-8; mode: python -*-
# pylint: disable=W0141,C0113,C0103,C0325
u"""
    cdomain
    ~~~~~~~

    Replacement for the sphinx c-domain.

    :copyright: Copyright (C) 2016 Markus Heiser
    :license: GPL Version 2, June 1991 see Linux/COPYING for details.

    List of customizations:

    * Moved the *duplicate C object description* warnings for function
      declarations in the nitpicky mode. See Sphinx documentation for
      the config values for ``nitpick`` and ``nitpick_ignore``.

    * Add option 'name' to the "c:function:" directive. With option 'name' the
      ref-name of a function can be modified. E.g.::

          .. c:function:: int ioctl( int fd, int request )
             :name: VIDIOC_LOG_STATUS

      The func-name (e.g. ioctl) remains in the output but the ref-name changed
      from 'ioctl' to 'VIDIOC_LOG_STATUS'. The function is referenced by::

      * :c:func:`VIDIOC_LOG_STATUS` or
      * :any:`VIDIOC_LOG_STATUS` (``:any:`` needs sphinx 1.3)

    * Handle signatures of function-like macros well. Don't try to deduce
      arguments types of function-like macros.

"""

from docutils import nodes
from docutils.parsers.rst import directives

import sphinx
from sphinx import addnodes
from sphinx.domains.c import c_funcptr_sig_re, c_sig_re
from sphinx.domains.c import CObject as Base_CObject
from sphinx.domains.c import CDomain as Base_CDomain

__version__ = '1.0'

# Get Sphinx version
major, minor, patch = sphinx.version_info[:3]

def setup(app):

    app.override_domain(CDomain)

    return dict(
        version = __version__,
        parallel_read_safe = True,
        parallel_write_safe = True
    )

class CObject(Base_CObject):

    """
    Description of a C language object.
    """
    option_spec = {
        "name" : directives.unchanged
    }

    def handle_func_like_macro(self, sig, signode):
        u"""Handles signatures of function-like macros.

        If the objtype is 'function' and the signature ``sig`` is a
        function-like macro, the name of the macro is returned. Otherwise
        ``False`` is returned. """

        if not self.objtype == 'function':
            return False

        m = c_funcptr_sig_re.match(sig)
        if m is None:
            m = c_sig_re.match(sig)
            if m is None:
                raise ValueError('no match')

        rettype, fullname, arglist, _const = m.groups()
        arglist = arglist.strip()
        if rettype or not arglist:
            return False

        arglist = arglist.replace('`', '').replace('\\ ', '') # remove markup
        arglist = [a.strip() for a in arglist.split(",")]

        # has the first argument a type?
        if len(arglist[0].split(" ")) > 1:
            return False

        # This is a function-like macro, its arguments are typeless!
        signode += addnodes.desc_name(fullname, fullname)
        paramlist = addnodes.desc_parameterlist()
        signode += paramlist

        for argname in arglist:
            param = addnodes.desc_parameter('', '', noemph=True)
            # separate by non-breaking space in the output
            param += nodes.emphasis(argname, argname)
            paramlist += param

        return fullname

    def handle_signature(self, sig, signode):
        """Transform a C signature into RST nodes."""

        fullname = self.handle_func_like_macro(sig, signode)
        if not fullname:
            fullname = super(CObject, self).handle_signature(sig, signode)

        if "name" in self.options:
            if self.objtype == 'function':
                fullname = self.options["name"]
            else:
                # FIXME: handle :name: value of other declaration types?
                pass
        return fullname

    def add_target_and_index(self, name, sig, signode):
        # for C API items we add a prefix since names are usually not qualified
        # by a module name and so easily clash with e.g. section titles
        targetname = 'c.' + name
        if targetname not in self.state.document.ids:
            signode['names'].append(targetname)
            signode['ids'].append(targetname)
            signode['first'] = (not self.names)
            self.state.document.note_explicit_target(signode)
            inv = self.env.domaindata['c']['objects']
            if (name in inv and self.env.config.nitpicky):
                if self.objtype == 'function':
                    if ('c:func', name) not in self.env.config.nitpick_ignore:
                        self.state_machine.reporter.warning(
                            'duplicate C object description of %s, ' % name +
                            'other instance in ' + self.env.doc2path(inv[name][0]),
                            line=self.lineno)
            inv[name] = (self.env.docname, self.objtype)

        indextext = self.get_index_text(name)
        if indextext:
            if major == 1 and minor < 4:
                # indexnode's tuple changed in 1.4
                # https://github.com/sphinx-doc/sphinx/commit/e6a5a3a92e938fcd75866b4227db9e0524d58f7c
                self.indexnode['entries'].append(
                    ('single', indextext, targetname, ''))
            else:
                self.indexnode['entries'].append(
                    ('single', indextext, targetname, '', None))

class CDomain(Base_CDomain):

    """C language domain."""
    name = 'c'
    label = 'C'
    directives = {
        'function': CObject,
        'member':   CObject,
        'macro':    CObject,
        'type':     CObject,
        'var':      CObject,
    }
doc/sphinx/kernel_include.py (new executable file, 190 lines)
@@ -0,0 +1,190 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8; mode: python -*-
|
||||
# pylint: disable=R0903, C0330, R0914, R0912, E0401
|
||||
|
||||
u"""
|
||||
kernel-include
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
Implementation of the ``kernel-include`` reST-directive.
|
||||
|
||||
:copyright: Copyright (C) 2016 Markus Heiser
|
||||
:license: GPL Version 2, June 1991 see linux/COPYING for details.
|
||||
|
||||
The ``kernel-include`` reST-directive is a replacement for the ``include``
|
||||
directive. The ``kernel-include`` directive expands environment variables in
|
||||
the path name and allows including files from arbitrary locations.
|
||||
|
||||
.. hint::
|
||||
|
||||
Including files from arbitrary locations (e.g. from ``/etc``) is a
|
||||
security risk for builders. This is why the ``include`` directive from
|
||||
docutils *prohibits* pathnames pointing to locations *above* the filesystem
|
||||
tree where the reST document with the include directive is placed.
|
||||
|
||||
Substrings of the form $name or ${name} are replaced by the value of
|
||||
environment variable name. Malformed variable names and references to
|
||||
non-existing variables are left unchanged.
|
||||
"""
|
||||
|
||||
# ==============================================================================
|
||||
# imports
|
||||
# ==============================================================================
|
||||
|
||||
import os.path
|
||||
|
||||
from docutils import io, nodes, statemachine
|
||||
from docutils.utils.error_reporting import SafeString, ErrorString
|
||||
from docutils.parsers.rst import directives
|
||||
from docutils.parsers.rst.directives.body import CodeBlock, NumberLines
|
||||
from docutils.parsers.rst.directives.misc import Include
|
||||
|
||||
__version__ = '1.0'
|
||||
|
||||
# ==============================================================================
|
||||
def setup(app):
|
||||
# ==============================================================================
|
||||
|
||||
app.add_directive("kernel-include", KernelInclude)
|
||||
return dict(
|
||||
version = __version__,
|
||||
parallel_read_safe = True,
|
||||
parallel_write_safe = True
|
||||
)
|
||||
|
||||
# ==============================================================================
|
||||
class KernelInclude(Include):
|
||||
# ==============================================================================
|
||||
|
||||
u"""KernelInclude (``kernel-include``) directive"""
|
||||
|
||||
def run(self):
|
||||
path = os.path.realpath(
|
||||
os.path.expandvars(self.arguments[0]))
|
||||
|
||||
# to get a bit security back, prohibit /etc:
|
||||
if path.startswith(os.sep + "etc"):
|
||||
raise self.severe(
|
||||
'Problems with "%s" directive, prohibited path: %s'
|
||||
% (self.name, path))
|
||||
|
||||
self.arguments[0] = path
|
||||
|
||||
#return super(KernelInclude, self).run() # won't work, see HINTs in _run()
|
||||
return self._run()
|
||||
|
||||
def _run(self):
|
||||
"""Include a file as part of the content of this reST file."""
|
||||
|
||||
# HINT: I had to copy&paste the whole Include.run method. I'm not happy
|
||||
# with this, but due to security reasons, the Include.run method does
|
||||
# not allow absolute or relative pathnames pointing to locations *above*
|
||||
# the filesystem tree where the reST document is placed.
|
||||
|
||||
if not self.state.document.settings.file_insertion_enabled:
|
||||
raise self.warning('"%s" directive disabled.' % self.name)
|
||||
source = self.state_machine.input_lines.source(
|
||||
self.lineno - self.state_machine.input_offset - 1)
|
||||
source_dir = os.path.dirname(os.path.abspath(source))
|
||||
path = directives.path(self.arguments[0])
|
||||
if path.startswith('<') and path.endswith('>'):
|
||||
path = os.path.join(self.standard_include_path, path[1:-1])
|
||||
path = os.path.normpath(os.path.join(source_dir, path))
|
||||
|
||||
# HINT: this is the only line I had to change / commented out:
|
||||
#path = utils.relative_path(None, path)
|
||||
|
||||
path = nodes.reprunicode(path)
|
||||
encoding = self.options.get(
|
||||
'encoding', self.state.document.settings.input_encoding)
|
||||
e_handler=self.state.document.settings.input_encoding_error_handler
|
||||
tab_width = self.options.get(
|
||||
'tab-width', self.state.document.settings.tab_width)
|
||||
try:
|
||||
self.state.document.settings.record_dependencies.add(path)
|
||||
include_file = io.FileInput(source_path=path,
|
||||
encoding=encoding,
|
||||
error_handler=e_handler)
|
||||
except UnicodeEncodeError as error:
|
||||
raise self.severe('Problems with "%s" directive path:\n'
|
||||
'Cannot encode input file path "%s" '
|
||||
'(wrong locale?).' %
|
||||
(self.name, SafeString(path)))
|
||||
except IOError as error:
|
||||
raise self.severe('Problems with "%s" directive path:\n%s.' %
|
||||
(self.name, ErrorString(error)))
|
||||
startline = self.options.get('start-line', None)
|
||||
endline = self.options.get('end-line', None)
|
||||
try:
|
||||
if startline or (endline is not None):
|
||||
lines = include_file.readlines()
|
||||
rawtext = ''.join(lines[startline:endline])
|
||||
else:
|
||||
rawtext = include_file.read()
|
||||
except UnicodeError as error:
|
||||
raise self.severe('Problem with "%s" directive:\n%s' %
|
||||
(self.name, ErrorString(error)))
|
||||
# start-after/end-before: no restrictions on newlines in match-text,
|
||||
# and no restrictions on matching inside lines vs. line boundaries
|
||||
after_text = self.options.get('start-after', None)
|
||||
if after_text:
|
||||
# skip content in rawtext before *and incl.* a matching text
|
||||
after_index = rawtext.find(after_text)
|
||||
if after_index < 0:
|
||||
raise self.severe('Problem with "start-after" option of "%s" '
|
||||
'directive:\nText not found.' % self.name)
|
||||
rawtext = rawtext[after_index + len(after_text):]
|
||||
before_text = self.options.get('end-before', None)
|
||||
if before_text:
|
||||
# skip content in rawtext after *and incl.* a matching text
|
||||
before_index = rawtext.find(before_text)
|
||||
if before_index < 0:
|
||||
raise self.severe('Problem with "end-before" option of "%s" '
|
||||
'directive:\nText not found.' % self.name)
|
||||
rawtext = rawtext[:before_index]
|
||||
|
||||
include_lines = statemachine.string2lines(rawtext, tab_width,
|
||||
convert_whitespace=True)
|
||||
if 'literal' in self.options:
|
||||
# Convert tabs to spaces, if `tab_width` is positive.
|
||||
if tab_width >= 0:
|
||||
text = rawtext.expandtabs(tab_width)
|
||||
else:
|
||||
text = rawtext
|
||||
literal_block = nodes.literal_block(rawtext, source=path,
|
||||
classes=self.options.get('class', []))
|
||||
literal_block.line = 1
|
||||
self.add_name(literal_block)
|
||||
if 'number-lines' in self.options:
|
||||
try:
|
||||
startline = int(self.options['number-lines'] or 1)
|
||||
except ValueError:
|
||||
raise self.error(':number-lines: with non-integer '
|
||||
'start value')
|
||||
endline = startline + len(include_lines)
|
||||
if text.endswith('\n'):
|
||||
text = text[:-1]
|
||||
tokens = NumberLines([([], text)], startline, endline)
|
||||
for classes, value in tokens:
|
||||
if classes:
|
||||
literal_block += nodes.inline(value, value,
|
||||
classes=classes)
|
||||
else:
|
||||
literal_block += nodes.Text(value, value)
|
||||
else:
|
||||
literal_block += nodes.Text(text, text)
|
||||
return [literal_block]
|
||||
if 'code' in self.options:
|
||||
self.options['source'] = path
|
||||
codeblock = CodeBlock(self.name,
|
||||
[self.options.pop('code')], # arguments
|
||||
self.options,
|
||||
include_lines, # content
|
||||
self.lineno,
|
||||
self.content_offset,
|
||||
self.block_text,
|
||||
self.state,
|
||||
self.state_machine)
|
||||
return codeblock.run()
|
||||
self.state_machine.insert_input(include_lines, path)
|
||||
return []
|
doc/sphinx/kerneldoc.py (new file, 146 lines)
@@ -0,0 +1,146 @@
|
||||
# coding=utf-8
|
||||
#
|
||||
# Copyright © 2016 Intel Corporation
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
# copy of this software and associated documentation files (the "Software"),
|
||||
# to deal in the Software without restriction, including without limitation
|
||||
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
# and/or sell copies of the Software, and to permit persons to whom the
|
||||
# Software is furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice (including the next
|
||||
# paragraph) shall be included in all copies or substantial portions of the
|
||||
# Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
#
|
||||
# Authors:
|
||||
# Jani Nikula <jani.nikula@intel.com>
|
||||
#
|
||||
# Please make sure this works on both python2 and python3.
|
||||
#
|
||||
|
||||
import codecs
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import re
|
||||
import glob
|
||||
|
||||
from docutils import nodes, statemachine
|
||||
from docutils.statemachine import ViewList
|
||||
from docutils.parsers.rst import directives, Directive
|
||||
from sphinx.ext.autodoc import AutodocReporter
|
||||
|
||||
__version__ = '1.0'
|
||||
|
||||
class KernelDocDirective(Directive):
|
||||
"""Extract kernel-doc comments from the specified file"""
|
||||
required_arguments = 1
|
||||
optional_arguments = 4
|
||||
option_spec = {
|
||||
'doc': directives.unchanged_required,
|
||||
'functions': directives.unchanged_required,
|
||||
'export': directives.unchanged,
|
||||
'internal': directives.unchanged,
|
||||
}
|
||||
has_content = False
|
||||
|
||||
def run(self):
|
||||
env = self.state.document.settings.env
|
||||
cmd = [env.config.kerneldoc_bin, '-rst', '-enable-lineno']
|
||||
|
||||
filename = env.config.kerneldoc_srctree + '/' + self.arguments[0]
|
||||
export_file_patterns = []
|
||||
|
||||
# Tell sphinx of the dependency
|
||||
env.note_dependency(os.path.abspath(filename))
|
||||
|
||||
tab_width = self.options.get('tab-width', self.state.document.settings.tab_width)
|
||||
|
||||
# FIXME: make this nicer and more robust against errors
|
||||
if 'export' in self.options:
|
||||
cmd += ['-export']
|
||||
export_file_patterns = str(self.options.get('export')).split()
|
||||
elif 'internal' in self.options:
|
||||
cmd += ['-internal']
|
||||
export_file_patterns = str(self.options.get('internal')).split()
|
||||
elif 'doc' in self.options:
|
||||
cmd += ['-function', str(self.options.get('doc'))]
|
||||
elif 'functions' in self.options:
|
||||
for f in str(self.options.get('functions')).split():
|
||||
cmd += ['-function', f]
|
||||
|
||||
for pattern in export_file_patterns:
|
||||
for f in glob.glob(env.config.kerneldoc_srctree + '/' + pattern):
|
||||
env.note_dependency(os.path.abspath(f))
|
||||
cmd += ['-export-file', f]
|
||||
|
||||
cmd += [filename]
|
||||
|
||||
try:
|
||||
env.app.verbose('calling kernel-doc \'%s\'' % (" ".join(cmd)))
|
||||
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
out, err = p.communicate()
|
||||
|
||||
out, err = codecs.decode(out, 'utf-8'), codecs.decode(err, 'utf-8')
|
||||
|
||||
if p.returncode != 0:
|
||||
sys.stderr.write(err)
|
||||
|
||||
env.app.warn('kernel-doc \'%s\' failed with return code %d' % (" ".join(cmd), p.returncode))
|
||||
return [nodes.error(None, nodes.paragraph(text = "kernel-doc missing"))]
|
||||
elif env.config.kerneldoc_verbosity > 0:
|
||||
sys.stderr.write(err)
|
||||
|
||||
lines = statemachine.string2lines(out, tab_width, convert_whitespace=True)
|
||||
result = ViewList()
|
||||
|
||||
lineoffset = 0;
|
||||
line_regex = re.compile("^#define LINENO ([0-9]+)$")
|
||||
for line in lines:
|
||||
match = line_regex.search(line)
|
||||
if match:
|
||||
# sphinx counts lines from 0
|
||||
lineoffset = int(match.group(1)) - 1
|
||||
# we must eat our comments since they upset the markup
|
||||
else:
|
||||
result.append(line, filename, lineoffset)
|
||||
lineoffset += 1
|
||||
|
||||
node = nodes.section()
|
||||
buf = self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter
|
||||
self.state.memo.reporter = AutodocReporter(result, self.state.memo.reporter)
|
||||
self.state.memo.title_styles, self.state.memo.section_level = [], 0
|
||||
try:
|
||||
self.state.nested_parse(result, 0, node, match_titles=1)
|
||||
finally:
|
||||
self.state.memo.title_styles, self.state.memo.section_level, self.state.memo.reporter = buf
|
||||
|
||||
return node.children
|
||||
|
||||
except Exception as e: # pylint: disable=W0703
|
||||
env.app.warn('kernel-doc \'%s\' processing failed with: %s' %
|
||||
(" ".join(cmd), str(e)))
|
||||
return [nodes.error(None, nodes.paragraph(text = "kernel-doc missing"))]
|
||||
|
||||
def setup(app):
|
||||
app.add_config_value('kerneldoc_bin', None, 'env')
|
||||
app.add_config_value('kerneldoc_srctree', None, 'env')
|
||||
app.add_config_value('kerneldoc_verbosity', 1, 'env')
|
||||
|
||||
app.add_directive('kernel-doc', KernelDocDirective)
|
||||
|
||||
return dict(
|
||||
version = __version__,
|
||||
parallel_read_safe = True,
|
||||
parallel_write_safe = True
|
||||
)
|
doc/sphinx/kfigure.py (new file, 551 lines)
@@ -0,0 +1,551 @@
|
||||
# -*- coding: utf-8; mode: python -*-
|
||||
# pylint: disable=C0103, R0903, R0912, R0915
|
||||
u"""
|
||||
scalable figure and image handling
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Sphinx extension which implements scalable image handling.
|
||||
|
||||
:copyright: Copyright (C) 2016 Markus Heiser
|
||||
:license: GPL Version 2, June 1991 see Linux/COPYING for details.
|
||||
|
||||
The build for image formats depends on the image's source format and the output's
|
||||
destination format. This extension implements methods to simplify image
|
||||
handling from the author's POV. Directives like ``kernel-figure`` implement
|
||||
methods *to* always get the best output-format even if some tools are not
|
||||
installed. For more details take a look at ``convert_image(...)`` which is
|
||||
the core of all conversions.
|
||||
|
||||
* ``.. kernel-image``: for image handling / a ``.. image::`` replacement
|
||||
|
||||
* ``.. kernel-figure``: for figure handling / a ``.. figure::`` replacement
|
||||
|
||||
* ``.. kernel-render``: for render markup / a concept to embed *render*
|
||||
markups (or languages). Supported markups (see ``RENDER_MARKUP_EXT``)
|
||||
|
||||
- ``DOT``: render embedded Graphviz's **DOT**
|
||||
- ``SVG``: render embedded Scalable Vector Graphics (**SVG**)
|
||||
- ... *developable*
|
||||
|
||||
Used tools:
|
||||
|
||||
* ``dot(1)``: Graphviz (http://www.graphviz.org). If Graphviz is not
|
||||
available, the DOT language is inserted as literal-block.
|
||||
|
||||
* SVG to PDF: To generate PDF, you need at least one of these tools:
|
||||
|
||||
- ``convert(1)``: ImageMagick (https://www.imagemagick.org)
|
||||
|
||||
List of customizations:
|
||||
|
||||
* generate PDF from SVG / used by PDF (LaTeX) builder
|
||||
|
||||
* generate SVG (html-builder) and PDF (latex-builder) from DOT files.
|
||||
DOT: see http://www.graphviz.org/content/dot-language
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
from os import path
|
||||
import subprocess
|
||||
from hashlib import sha1
|
||||
import sys
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.statemachine import ViewList
|
||||
from docutils.parsers.rst import directives
|
||||
from docutils.parsers.rst.directives import images
|
||||
import sphinx
|
||||
|
||||
from sphinx.util.nodes import clean_astext
|
||||
from six import iteritems
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
_unicode = str
|
||||
else:
|
||||
_unicode = unicode
|
||||
|
||||
# Get Sphinx version
|
||||
major, minor, patch = sphinx.version_info[:3]
|
||||
if major == 1 and minor > 3:
|
||||
# patches.Figure only landed in Sphinx 1.4
|
||||
from sphinx.directives.patches import Figure # pylint: disable=C0413
|
||||
else:
|
||||
Figure = images.Figure
|
||||
|
||||
__version__ = '1.0.0'
|
||||
|
||||
# simple helper
|
||||
# -------------
|
||||
|
||||
def which(cmd):
|
||||
"""Searches the ``cmd`` in the ``PATH`` environment.
|
||||
|
||||
This *which* searches the PATH for executable ``cmd`` . First match is
|
||||
returned; if nothing is found, ``None`` is returned.
|
||||
"""
|
||||
envpath = os.environ.get('PATH', None) or os.defpath
|
||||
for folder in envpath.split(os.pathsep):
|
||||
fname = folder + os.sep + cmd
|
||||
if path.isfile(fname):
|
||||
return fname
|
||||
|
||||
def mkdir(folder, mode=0o775):
|
||||
if not path.isdir(folder):
|
||||
os.makedirs(folder, mode)
|
||||
|
||||
def file2literal(fname):
|
||||
with open(fname, "r") as src:
|
||||
data = src.read()
|
||||
node = nodes.literal_block(data, data)
|
||||
return node
|
||||
|
||||
def isNewer(path1, path2):
|
||||
"""Returns True if ``path1`` is newer than ``path2``
|
||||
|
||||
If ``path1`` exists and is newer than ``path2`` the function returns
|
||||
``True``, otherwise it returns ``False``
|
||||
"""
|
||||
return (path.exists(path1)
|
||||
and os.stat(path1).st_ctime > os.stat(path2).st_ctime)
|
||||
|
||||
def pass_handle(self, node): # pylint: disable=W0613
|
||||
pass
|
||||
|
||||
# setup conversion tools and sphinx extension
|
||||
# -------------------------------------------
|
||||
|
||||
# Graphviz's dot(1) support
|
||||
dot_cmd = None
|
||||
|
||||
# ImageMagick' convert(1) support
|
||||
convert_cmd = None
|
||||
|
||||
|
||||
def setup(app):
|
||||
# check toolchain first
|
||||
app.connect('builder-inited', setupTools)
|
||||
|
||||
# image handling
|
||||
app.add_directive("kernel-image", KernelImage)
|
||||
app.add_node(kernel_image,
|
||||
html = (visit_kernel_image, pass_handle),
|
||||
latex = (visit_kernel_image, pass_handle),
|
||||
texinfo = (visit_kernel_image, pass_handle),
|
||||
text = (visit_kernel_image, pass_handle),
|
||||
man = (visit_kernel_image, pass_handle), )
|
||||
|
||||
# figure handling
|
||||
app.add_directive("kernel-figure", KernelFigure)
|
||||
app.add_node(kernel_figure,
|
||||
html = (visit_kernel_figure, pass_handle),
|
||||
latex = (visit_kernel_figure, pass_handle),
|
||||
texinfo = (visit_kernel_figure, pass_handle),
|
||||
text = (visit_kernel_figure, pass_handle),
|
||||
man = (visit_kernel_figure, pass_handle), )
|
||||
|
||||
# render handling
|
||||
app.add_directive('kernel-render', KernelRender)
|
||||
app.add_node(kernel_render,
|
||||
html = (visit_kernel_render, pass_handle),
|
||||
latex = (visit_kernel_render, pass_handle),
|
||||
texinfo = (visit_kernel_render, pass_handle),
|
||||
text = (visit_kernel_render, pass_handle),
|
||||
man = (visit_kernel_render, pass_handle), )
|
||||
|
||||
app.connect('doctree-read', add_kernel_figure_to_std_domain)
|
||||
|
||||
return dict(
|
||||
version = __version__,
|
||||
parallel_read_safe = True,
|
||||
parallel_write_safe = True
|
||||
)
|
||||
|
||||
|
||||
def setupTools(app):
|
||||
u"""
|
||||
Check available build tools and log some *verbose* messages.
|
||||
|
||||
This function is called once, when the builder is initiated.
|
||||
"""
|
||||
global dot_cmd, convert_cmd # pylint: disable=W0603
|
||||
app.verbose("kfigure: check installed tools ...")
|
||||
|
||||
dot_cmd = which('dot')
|
||||
convert_cmd = which('convert')
|
||||
|
||||
if dot_cmd:
|
||||
app.verbose("use dot(1) from: " + dot_cmd)
|
||||
else:
|
||||
app.warn("dot(1) not found, for better output quality install "
|
||||
"graphviz from http://www.graphviz.org")
|
||||
if convert_cmd:
|
||||
app.verbose("use convert(1) from: " + convert_cmd)
|
||||
else:
|
||||
app.warn(
|
||||
"convert(1) not found, for SVG to PDF conversion install "
|
||||
"ImageMagick (https://www.imagemagick.org)")
|
||||
|
||||
|
||||
# integrate conversion tools
|
||||
# --------------------------
|
||||
|
||||
RENDER_MARKUP_EXT = {
|
||||
# The '.ext' must be handled by convert_image(..) function's *in_ext* input.
|
||||
# <name> : <.ext>
|
||||
'DOT' : '.dot',
|
||||
'SVG' : '.svg'
|
||||
}
|
||||
|
||||
def convert_image(img_node, translator, src_fname=None):
|
||||
"""Convert a image node for the builder.
|
||||
|
||||
Different builders prefer different image formats, e.g. the *latex* builder
|
||||
prefers PDF while the *html* builder prefers SVG format for images.
|
||||
|
||||
This function handles output image formats depending on the source
|
||||
format (of the image) and the translator's output format.
|
||||
"""
|
||||
app = translator.builder.app
|
||||
|
||||
fname, in_ext = path.splitext(path.basename(img_node['uri']))
|
||||
if src_fname is None:
|
||||
src_fname = path.join(translator.builder.srcdir, img_node['uri'])
|
||||
if not path.exists(src_fname):
|
||||
src_fname = path.join(translator.builder.outdir, img_node['uri'])
|
||||
|
||||
dst_fname = None
|
||||
|
||||
# in kernel builds, use 'make SPHINXOPTS=-v' to see verbose messages
|
||||
|
||||
app.verbose('assert best format for: ' + img_node['uri'])
|
||||
|
||||
if in_ext == '.dot':
|
||||
|
||||
if not dot_cmd:
|
||||
app.verbose("dot from graphviz not available / include DOT raw.")
|
||||
img_node.replace_self(file2literal(src_fname))
|
||||
|
||||
elif translator.builder.format == 'latex':
|
||||
dst_fname = path.join(translator.builder.outdir, fname + '.pdf')
|
||||
img_node['uri'] = fname + '.pdf'
|
||||
img_node['candidates'] = {'*': fname + '.pdf'}
|
||||
|
||||
|
||||
elif translator.builder.format == 'html':
|
||||
dst_fname = path.join(
|
||||
translator.builder.outdir,
|
||||
translator.builder.imagedir,
|
||||
fname + '.svg')
|
||||
img_node['uri'] = path.join(
|
||||
translator.builder.imgpath, fname + '.svg')
|
||||
img_node['candidates'] = {
|
||||
'*': path.join(translator.builder.imgpath, fname + '.svg')}
|
||||
|
||||
else:
|
||||
# all other builder formats will include DOT as raw
|
||||
img_node.replace_self(file2literal(src_fname))
|
||||
|
||||
elif in_ext == '.svg':
|
||||
|
||||
if translator.builder.format == 'latex':
|
||||
if convert_cmd is None:
|
||||
app.verbose("no SVG to PDF conversion available / include SVG raw.")
|
||||
img_node.replace_self(file2literal(src_fname))
|
||||
else:
|
||||
dst_fname = path.join(translator.builder.outdir, fname + '.pdf')
|
||||
img_node['uri'] = fname + '.pdf'
|
||||
img_node['candidates'] = {'*': fname + '.pdf'}
|
||||
|
||||
if dst_fname:
|
||||
# the builder does not need to copy it one more time, so pop it if it exists.
|
||||
translator.builder.images.pop(img_node['uri'], None)
|
||||
_name = dst_fname[len(translator.builder.outdir) + 1:]
|
||||
|
||||
if isNewer(dst_fname, src_fname):
|
||||
app.verbose("convert: {out}/%s already exists and is newer" % _name)
|
||||
|
||||
else:
|
||||
ok = False
|
||||
mkdir(path.dirname(dst_fname))
|
||||
|
||||
if in_ext == '.dot':
|
||||
app.verbose('convert DOT to: {out}/' + _name)
|
||||
ok = dot2format(app, src_fname, dst_fname)
|
||||
|
||||
elif in_ext == '.svg':
|
||||
app.verbose('convert SVG to: {out}/' + _name)
|
||||
ok = svg2pdf(app, src_fname, dst_fname)
|
||||
|
||||
if not ok:
|
||||
img_node.replace_self(file2literal(src_fname))
|
||||
|
||||
|
||||
def dot2format(app, dot_fname, out_fname):
|
||||
"""Converts DOT file to ``out_fname`` using ``dot(1)``.
|
||||
|
||||
* ``dot_fname`` pathname of the input DOT file, including extension ``.dot``
|
||||
* ``out_fname`` pathname of the output file, including format extension
|
||||
|
||||
The *format extension* depends on the ``dot`` command (see ``man dot``
|
||||
option ``-Txxx``). Normally you will use one of the following extensions:
|
||||
|
||||
- ``.ps`` for PostScript,
|
||||
- ``.svg`` or ``svgz`` for Structured Vector Graphics,
|
||||
- ``.fig`` for XFIG graphics and
|
||||
- ``.png`` or ``gif`` for common bitmap graphics.
|
||||
|
||||
"""
|
||||
out_format = path.splitext(out_fname)[1][1:]
|
||||
cmd = [dot_cmd, '-T%s' % out_format, dot_fname]
|
||||
exit_code = 42
|
||||
|
||||
with open(out_fname, "w") as out:
|
||||
exit_code = subprocess.call(cmd, stdout = out)
|
||||
if exit_code != 0:
|
||||
app.warn("Error #%d when calling: %s" % (exit_code, " ".join(cmd)))
|
||||
return bool(exit_code == 0)
|
||||
|
||||
def svg2pdf(app, svg_fname, pdf_fname):
|
||||
"""Converts SVG to PDF with ``convert(1)`` command.
|
||||
|
||||
Uses ``convert(1)`` from ImageMagick (https://www.imagemagick.org) for
|
||||
conversion. Returns ``True`` on success and ``False`` if an error occurred.
|
||||
|
||||
* ``svg_fname`` pathname of the input SVG file with extension (``.svg``)
|
||||
* ``pdf_name`` pathname of the output PDF file with extension (``.pdf``)
|
||||
|
||||
"""
|
||||
cmd = [convert_cmd, svg_fname, pdf_fname]
|
||||
# use stdout and stderr from parent
|
||||
exit_code = subprocess.call(cmd)
|
||||
if exit_code != 0:
|
||||
app.warn("Error #%d when calling: %s" % (exit_code, " ".join(cmd)))
|
||||
return bool(exit_code == 0)
|
||||
|
||||
|
||||
# image handling
|
||||
# ---------------------
|
||||
|
||||
def visit_kernel_image(self, node): # pylint: disable=W0613
|
||||
"""Visitor of the ``kernel_image`` Node.
|
||||
|
||||
Handles the ``image`` child-node with the ``convert_image(...)``.
|
||||
"""
|
||||
img_node = node[0]
|
||||
convert_image(img_node, self)
|
||||
|
||||
class kernel_image(nodes.image):
|
||||
"""Node for ``kernel-image`` directive."""
|
||||
pass
|
||||
|
||||
class KernelImage(images.Image):
|
||||
u"""KernelImage directive
|
||||
|
||||
Earns everything from ``.. image::`` directive, except *remote URI* and
|
||||
*glob* pattern. The KernelImage wraps an image node into a
|
||||
kernel_image node. See ``visit_kernel_image``.
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
uri = self.arguments[0]
|
||||
if uri.endswith('.*') or uri.find('://') != -1:
|
||||
raise self.severe(
|
||||
'Error in "%s: %s": glob pattern and remote images are not allowed'
|
||||
% (self.name, uri))
|
||||
result = images.Image.run(self)
|
||||
if len(result) == 2 or isinstance(result[0], nodes.system_message):
|
||||
return result
|
||||
(image_node,) = result
|
||||
# wrap image node into a kernel_image node / see visitors
|
||||
node = kernel_image('', image_node)
|
||||
return [node]
|
||||
|
||||
# figure handling
|
||||
# ---------------------
|
||||
|
||||
def visit_kernel_figure(self, node): # pylint: disable=W0613
|
||||
"""Visitor of the ``kernel_figure`` Node.
|
||||
|
||||
Handles the ``image`` child-node with the ``convert_image(...)``.
|
||||
"""
|
||||
img_node = node[0][0]
|
||||
convert_image(img_node, self)
|
||||
|
||||
class kernel_figure(nodes.figure):
|
||||
"""Node for ``kernel-figure`` directive."""
|
||||
|
||||
class KernelFigure(Figure):
|
||||
u"""KernelImage directive
|
||||
|
||||
Earns everything from ``.. figure::`` directive, except *remote URI* and
|
||||
*glob* pattern. The KernelFigure wraps a figure node into a kernel_figure
|
||||
node. See ``visit_kernel_figure``.
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
uri = self.arguments[0]
|
||||
if uri.endswith('.*') or uri.find('://') != -1:
|
||||
raise self.severe(
|
||||
'Error in "%s: %s":'
|
||||
' glob pattern and remote images are not allowed'
|
||||
% (self.name, uri))
|
||||
result = Figure.run(self)
|
||||
if len(result) == 2 or isinstance(result[0], nodes.system_message):
|
||||
return result
|
||||
(figure_node,) = result
|
||||
# wrap figure node into a kernel_figure node / see visitors
|
||||
node = kernel_figure('', figure_node)
|
||||
return [node]
|
||||
|
||||
|
||||
# render handling
|
||||
# ---------------------
|
||||
|
||||
def visit_kernel_render(self, node):
|
||||
"""Visitor of the ``kernel_render`` Node.
|
||||
|
||||
If rendering tools are available, save the markup of the ``literal_block`` child
|
||||
node into a file and replace the ``literal_block`` node with a newly created
|
||||
``image`` node, pointing to the saved markup file. Afterwards, handle the
|
||||
image child-node with the ``convert_image(...)``.
|
||||
"""
|
||||
app = self.builder.app
|
||||
srclang = node.get('srclang')
|
||||
|
||||
app.verbose('visit kernel-render node lang: "%s"' % (srclang))
|
||||
|
||||
tmp_ext = RENDER_MARKUP_EXT.get(srclang, None)
|
||||
if tmp_ext is None:
|
||||
app.warn('kernel-render: "%s" unknown / include raw.' % (srclang))
|
||||
return
|
||||
|
||||
if not dot_cmd and tmp_ext == '.dot':
|
||||
app.verbose("dot from graphviz not available / include raw.")
|
||||
return
|
||||
|
||||
literal_block = node[0]
|
||||
|
||||
code = literal_block.astext()
|
||||
hashobj = code.encode('utf-8') # str(node.attributes)
|
||||
fname = path.join('%s-%s' % (srclang, sha1(hashobj).hexdigest()))
|
||||
|
||||
tmp_fname = path.join(
|
||||
self.builder.outdir, self.builder.imagedir, fname + tmp_ext)
|
||||
|
||||
if not path.isfile(tmp_fname):
|
||||
mkdir(path.dirname(tmp_fname))
|
||||
with open(tmp_fname, "w") as out:
|
||||
out.write(code)
|
||||
|
||||
img_node = nodes.image(node.rawsource, **node.attributes)
|
||||
img_node['uri'] = path.join(self.builder.imgpath, fname + tmp_ext)
|
||||
img_node['candidates'] = {
|
||||
'*': path.join(self.builder.imgpath, fname + tmp_ext)}
|
||||
|
||||
literal_block.replace_self(img_node)
|
||||
convert_image(img_node, self, tmp_fname)
|
||||
|
||||
|
||||
class kernel_render(nodes.General, nodes.Inline, nodes.Element):
|
||||
"""Node for ``kernel-render`` directive."""
|
||||
pass
|
||||
|
||||
class KernelRender(Figure):
|
||||
u"""KernelRender directive
|
||||
|
||||
Render content by external tool. Has all the options known from the
|
||||
*figure* directive, plus option ``caption``. If ``caption`` has a
|
||||
value, a figure node with the *caption* is inserted. If not, an image node is
|
||||
inserted.
|
||||
|
||||
The KernelRender directive wraps the text of the directive into a
|
||||
literal_block node and wraps it into a kernel_render node. See
|
||||
``visit_kernel_render``.
|
||||
"""
|
||||
has_content = True
|
||||
required_arguments = 1
|
||||
optional_arguments = 0
|
||||
final_argument_whitespace = False
|
||||
|
||||
# earn options from 'figure'
|
||||
option_spec = Figure.option_spec.copy()
|
||||
option_spec['caption'] = directives.unchanged
|
||||
|
||||
def run(self):
|
||||
return [self.build_node()]
|
||||
|
||||
def build_node(self):
|
||||
|
||||
srclang = self.arguments[0].strip()
|
||||
if srclang not in RENDER_MARKUP_EXT.keys():
|
||||
return [self.state_machine.reporter.warning(
|
||||
'Unknown source language "%s", use one of: %s.' % (
|
||||
srclang, ",".join(RENDER_MARKUP_EXT.keys())),
|
||||
line=self.lineno)]
|
||||
|
||||
code = '\n'.join(self.content)
|
||||
if not code.strip():
|
||||
return [self.state_machine.reporter.warning(
|
||||
'Ignoring "%s" directive without content.' % (
|
||||
self.name),
|
||||
line=self.lineno)]
|
||||
|
||||
node = kernel_render()
|
||||
node['alt'] = self.options.get('alt','')
|
||||
node['srclang'] = srclang
|
||||
literal_node = nodes.literal_block(code, code)
|
||||
node += literal_node
|
||||
|
||||
caption = self.options.get('caption')
|
||||
if caption:
|
||||
# parse caption's content
|
||||
parsed = nodes.Element()
|
||||
self.state.nested_parse(
|
||||
ViewList([caption], source=''), self.content_offset, parsed)
|
||||
caption_node = nodes.caption(
|
||||
parsed[0].rawsource, '', *parsed[0].children)
|
||||
caption_node.source = parsed[0].source
|
||||
caption_node.line = parsed[0].line
|
||||
|
||||
figure_node = nodes.figure('', node)
|
||||
for k,v in self.options.items():
|
||||
figure_node[k] = v
|
||||
figure_node += caption_node
|
||||
|
||||
node = figure_node
|
||||
|
||||
return node
|
||||
|
||||
def add_kernel_figure_to_std_domain(app, doctree):
|
||||
"""Add kernel-figure anchors to 'std' domain.
|
||||
|
||||
The ``StandardDomain.process_doc(..)`` method does not know how to resolve
|
||||
the caption (label) of ``kernel-figure`` directive (it only knows about
|
||||
standard nodes, e.g. table, figure etc.). Without any additional handling
|
||||
this will result in an 'undefined label' for kernel-figures.
|
||||
|
||||
This handle adds labels of kernel-figure to the 'std' domain labels.
|
||||
"""
|
||||
|
||||
std = app.env.domains["std"]
|
||||
docname = app.env.docname
|
||||
labels = std.data["labels"]
|
||||
|
||||
for name, explicit in iteritems(doctree.nametypes):
|
||||
if not explicit:
|
||||
continue
|
||||
labelid = doctree.nameids[name]
|
||||
if labelid is None:
|
||||
continue
|
||||
node = doctree.ids[labelid]
|
||||
|
||||
if node.tagname == 'kernel_figure':
|
||||
for n in node.next_node():
|
||||
if n.tagname == 'caption':
|
||||
sectname = clean_astext(n)
|
||||
# add label to std domain
|
||||
labels[name] = docname, labelid, sectname
|
||||
break
|
doc/sphinx/load_config.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# -*- coding: utf-8; mode: python -*-
# pylint: disable=R0903, C0330, R0914, R0912, E0401

import os
import sys
from sphinx.util.pycompat import execfile_

# ------------------------------------------------------------------------------
def loadConfig(namespace):
# ------------------------------------------------------------------------------

    u"""Load an additional configuration file into *namespace*.

    The name of the configuration file is taken from the environment
    ``SPHINX_CONF``. The external configuration file extends (or overwrites) the
    configuration values from the origin ``conf.py``. With this you are able to
    maintain *build themes*. """

    config_file = os.environ.get("SPHINX_CONF", None)
    if (config_file is not None
        and os.path.normpath(namespace["__file__"]) != os.path.normpath(config_file) ):
        config_file = os.path.abspath(config_file)

        if os.path.isfile(config_file):
            sys.stdout.write("load additional sphinx-config: %s\n" % config_file)
            config = namespace.copy()
            config['__file__'] = config_file
            execfile_(config_file, config)
            del config['__file__']
            namespace.update(config)
        else:
            sys.stderr.write("WARNING: additional sphinx-config not found: %s\n" % config_file)
doc/sphinx/parse-headers.pl (new executable file, 401 lines)
@@ -0,0 +1,401 @@
|
||||
#!/usr/bin/perl
|
||||
use strict;
|
||||
use Text::Tabs;
|
||||
use Getopt::Long;
|
||||
use Pod::Usage;
|
||||
|
||||
my $debug;
|
||||
my $help;
|
||||
my $man;
|
||||
|
||||
GetOptions(
|
||||
"debug" => \$debug,
|
||||
'usage|?' => \$help,
|
||||
'help' => \$man
|
||||
) or pod2usage(2);
|
||||
|
||||
pod2usage(1) if $help;
|
||||
pod2usage(-exitstatus => 0, -verbose => 2) if $man;
|
||||
pod2usage(2) if (scalar @ARGV < 2 || scalar @ARGV > 3);
|
||||
|
||||
my ($file_in, $file_out, $file_exceptions) = @ARGV;
|
||||
|
||||
my $data;
|
||||
my %ioctls;
|
||||
my %defines;
|
||||
my %typedefs;
|
||||
my %enums;
|
||||
my %enum_symbols;
|
||||
my %structs;
|
||||
|
||||
require Data::Dumper if ($debug);
|
||||
|
||||
#
|
||||
# read the file and get identifiers
|
||||
#
|
||||
|
||||
my $is_enum = 0;
|
||||
my $is_comment = 0;
|
||||
open IN, $file_in or die "Can't open $file_in";
|
||||
while (<IN>) {
|
||||
$data .= $_;
|
||||
|
||||
my $ln = $_;
|
||||
if (!$is_comment) {
|
||||
$ln =~ s,/\*.*(\*/),,g;
|
||||
|
||||
$is_comment = 1 if ($ln =~ s,/\*.*,,);
|
||||
} else {
|
||||
if ($ln =~ s,^(.*\*/),,) {
|
||||
$is_comment = 0;
|
||||
} else {
|
||||
next;
|
||||
}
|
||||
}
|
||||
|
||||
if ($is_enum && $ln =~ m/^\s*([_\w][\w\d_]+)\s*[\,=]?/) {
|
||||
my $s = $1;
|
||||
my $n = $1;
|
||||
$n =~ tr/A-Z/a-z/;
|
||||
$n =~ tr/_/-/;
|
||||
|
||||
$enum_symbols{$s} = "\\ :ref:`$s <$n>`\\ ";
|
||||
|
||||
$is_enum = 0 if ($is_enum && m/\}/);
|
||||
next;
|
||||
}
|
||||
$is_enum = 0 if ($is_enum && m/\}/);
|
||||
|
||||
if ($ln =~ m/^\s*#\s*define\s+([_\w][\w\d_]+)\s+_IO/) {
|
||||
my $s = $1;
|
||||
my $n = $1;
|
||||
$n =~ tr/A-Z/a-z/;
|
||||
|
||||
$ioctls{$s} = "\\ :ref:`$s <$n>`\\ ";
|
||||
next;
|
||||
}
|
||||
|
||||
if ($ln =~ m/^\s*#\s*define\s+([_\w][\w\d_]+)\s+/) {
|
||||
my $s = $1;
|
||||
my $n = $1;
|
||||
$n =~ tr/A-Z/a-z/;
|
||||
$n =~ tr/_/-/;
|
||||
|
||||
$defines{$s} = "\\ :ref:`$s <$n>`\\ ";
|
||||
next;
|
||||
}
|
||||
|
||||
if ($ln =~ m/^\s*typedef\s+([_\w][\w\d_]+)\s+(.*)\s+([_\w][\w\d_]+);/) {
|
||||
my $s = $2;
|
||||
my $n = $3;
|
||||
|
||||
$typedefs{$n} = "\\ :c:type:`$n <$s>`\\ ";
|
||||
next;
|
||||
}
|
||||
if ($ln =~ m/^\s*enum\s+([_\w][\w\d_]+)\s+\{/
|
||||
|| $ln =~ m/^\s*enum\s+([_\w][\w\d_]+)$/
|
||||
|| $ln =~ m/^\s*typedef\s*enum\s+([_\w][\w\d_]+)\s+\{/
|
||||
|| $ln =~ m/^\s*typedef\s*enum\s+([_\w][\w\d_]+)$/) {
|
||||
my $s = $1;
|
||||
|
||||
$enums{$s} = "enum :c:type:`$s`\\ ";
|
||||
|
||||
$is_enum = $1;
|
||||
next;
|
||||
}
|
||||
if ($ln =~ m/^\s*struct\s+([_\w][\w\d_]+)\s+\{/
|
||||
|| $ln =~ m/^\s*struct\s+([[_\w][\w\d_]+)$/
|
||||
|| $ln =~ m/^\s*typedef\s*struct\s+([_\w][\w\d_]+)\s+\{/
|
||||
|| $ln =~ m/^\s*typedef\s*struct\s+([[_\w][\w\d_]+)$/
|
||||
) {
|
||||
my $s = $1;
|
||||
|
||||
$structs{$s} = "struct :c:type:`$s`\\ ";
|
||||
next;
|
||||
}
|
||||
}
|
||||
close IN;
|
||||
|
||||
#
|
||||
# Handle multi-line typedefs
|
||||
#
|
||||
|
||||
my @matches = ($data =~ m/typedef\s+struct\s+\S+?\s*\{[^\}]+\}\s*(\S+)\s*\;/g,
|
||||
$data =~ m/typedef\s+enum\s+\S+?\s*\{[^\}]+\}\s*(\S+)\s*\;/g,);
|
||||
foreach my $m (@matches) {
|
||||
my $s = $m;
|
||||
|
||||
$typedefs{$s} = "\\ :c:type:`$s`\\ ";
|
||||
next;
|
||||
}
|
||||
|
||||
#
|
||||
# Handle exceptions, if any
|
||||
#
|
||||
|
||||
my %def_reftype = (
|
||||
"ioctl" => ":ref",
|
||||
"define" => ":ref",
|
||||
"symbol" => ":ref",
|
||||
"typedef" => ":c:type",
|
||||
"enum" => ":c:type",
|
||||
"struct" => ":c:type",
|
||||
);
|
||||
|
||||
if ($file_exceptions) {
|
||||
open IN, $file_exceptions or die "Can't read $file_exceptions";
|
||||
while (<IN>) {
|
||||
next if (m/^\s*$/ || m/^\s*#/);
|
||||
|
||||
# Parsers to ignore a symbol
|
||||
|
||||
if (m/^ignore\s+ioctl\s+(\S+)/) {
|
||||
delete $ioctls{$1} if (exists($ioctls{$1}));
|
||||
next;
|
||||
}
|
||||
if (m/^ignore\s+define\s+(\S+)/) {
|
||||
delete $defines{$1} if (exists($defines{$1}));
|
||||
next;
|
||||
}
|
||||
if (m/^ignore\s+typedef\s+(\S+)/) {
|
||||
delete $typedefs{$1} if (exists($typedefs{$1}));
|
||||
next;
|
||||
}
|
||||
if (m/^ignore\s+enum\s+(\S+)/) {
|
||||
delete $enums{$1} if (exists($enums{$1}));
|
||||
next;
|
||||
}
|
||||
if (m/^ignore\s+struct\s+(\S+)/) {
|
||||
delete $structs{$1} if (exists($structs{$1}));
|
||||
next;
|
||||
}
|
||||
if (m/^ignore\s+symbol\s+(\S+)/) {
|
||||
delete $enum_symbols{$1} if (exists($enum_symbols{$1}));
|
||||
next;
|
||||
}
|
||||
|
||||
# Parsers to replace a symbol
|
||||
my ($type, $old, $new, $reftype);
|
||||
|
||||
if (m/^replace\s+(\S+)\s+(\S+)\s+(\S+)/) {
|
||||
$type = $1;
|
||||
$old = $2;
|
||||
$new = $3;
|
||||
} else {
|
||||
die "Can't parse $file_exceptions: $_";
|
||||
}
|
||||
|
||||
if ($new =~ m/^\:c\:(data|func|macro|type)\:\`(.+)\`/) {
|
||||
$reftype = ":c:$1";
|
||||
$new = $2;
|
||||
} elsif ($new =~ m/\:ref\:\`(.+)\`/) {
|
||||
$reftype = ":ref";
|
||||
$new = $1;
|
||||
} else {
|
||||
$reftype = $def_reftype{$type};
|
||||
}
|
||||
$new = "$reftype:`$old <$new>`";
|
||||
|
||||
if ($type eq "ioctl") {
|
||||
$ioctls{$old} = $new if (exists($ioctls{$old}));
|
||||
next;
|
||||
}
|
||||
if ($type eq "define") {
|
||||
$defines{$old} = $new if (exists($defines{$old}));
|
||||
next;
|
||||
}
|
||||
if ($type eq "symbol") {
|
||||
$enum_symbols{$old} = $new if (exists($enum_symbols{$old}));
|
||||
next;
|
||||
}
|
||||
if ($type eq "typedef") {
|
||||
$typedefs{$old} = $new if (exists($typedefs{$old}));
|
||||
next;
|
||||
}
|
||||
if ($type eq "enum") {
|
||||
$enums{$old} = $new if (exists($enums{$old}));
|
||||
next;
|
||||
}
|
||||
if ($type eq "struct") {
|
||||
$structs{$old} = $new if (exists($structs{$old}));
|
||||
next;
|
||||
}
|
||||
|
||||
die "Can't parse $file_exceptions: $_";
|
||||
}
|
||||
}
|
||||
|
||||
if ($debug) {
|
||||
print Data::Dumper->Dump([\%ioctls], [qw(*ioctls)]) if (%ioctls);
|
||||
print Data::Dumper->Dump([\%typedefs], [qw(*typedefs)]) if (%typedefs);
|
||||
print Data::Dumper->Dump([\%enums], [qw(*enums)]) if (%enums);
|
||||
print Data::Dumper->Dump([\%structs], [qw(*structs)]) if (%structs);
|
||||
print Data::Dumper->Dump([\%defines], [qw(*defines)]) if (%defines);
|
||||
print Data::Dumper->Dump([\%enum_symbols], [qw(*enum_symbols)]) if (%enum_symbols);
|
||||
}
|
||||
|
||||
#
|
||||
# Align block
|
||||
#
|
||||
$data = expand($data);
|
||||
$data = " " . $data;
|
||||
$data =~ s/\n/\n /g;
|
||||
$data =~ s/\n\s+$/\n/g;
|
||||
$data =~ s/\n\s+\n/\n\n/g;
|
||||
|
||||
#
|
||||
# Add escape codes for special characters
|
||||
#
|
||||
$data =~ s,([\_\`\*\<\>\&\\\\:\/\|\%\$\#\{\}\~\^]),\\$1,g;
|
||||
|
||||
$data =~ s,DEPRECATED,**DEPRECATED**,g;
|
||||
|
||||
#
|
||||
# Add references
|
||||
#
|
||||
|
||||
my $start_delim = "[ \n\t\(\=\*\@]";
|
||||
my $end_delim = "(\\s|,|\\\\=|\\\\:|\\;|\\\)|\\}|\\{)";
|
||||
|
||||
foreach my $r (keys %ioctls) {
|
||||
my $s = $ioctls{$r};
|
||||
|
||||
$r =~ s,([\_\`\*\<\>\&\\\\:\/]),\\\\$1,g;
|
||||
|
||||
print "$r -> $s\n" if ($debug);
|
||||
|
||||
$data =~ s/($start_delim)($r)$end_delim/$1$s$3/g;
|
||||
}
|
||||
|
||||
foreach my $r (keys %defines) {
|
||||
my $s = $defines{$r};
|
||||
|
||||
$r =~ s,([\_\`\*\<\>\&\\\\:\/]),\\\\$1,g;
|
||||
|
||||
print "$r -> $s\n" if ($debug);
|
||||
|
||||
$data =~ s/($start_delim)($r)$end_delim/$1$s$3/g;
|
||||
}
|
||||
|
||||
foreach my $r (keys %enum_symbols) {
|
||||
my $s = $enum_symbols{$r};
|
||||
|
||||
$r =~ s,([\_\`\*\<\>\&\\\\:\/]),\\\\$1,g;
|
||||
|
||||
print "$r -> $s\n" if ($debug);
|
||||
|
||||
$data =~ s/($start_delim)($r)$end_delim/$1$s$3/g;
|
||||
}
|
||||
|
||||
foreach my $r (keys %enums) {
|
||||
my $s = $enums{$r};
|
||||
|
||||
$r =~ s,([\_\`\*\<\>\&\\\\:\/]),\\\\$1,g;
|
||||
|
||||
print "$r -> $s\n" if ($debug);
|
||||
|
||||
$data =~ s/enum\s+($r)$end_delim/$s$2/g;
|
||||
}
|
||||
|
||||
foreach my $r (keys %structs) {
|
||||
my $s = $structs{$r};
|
||||
|
||||
$r =~ s,([\_\`\*\<\>\&\\\\:\/]),\\\\$1,g;
|
||||
|
||||
print "$r -> $s\n" if ($debug);
|
||||
|
||||
$data =~ s/struct\s+($r)$end_delim/$s$2/g;
|
||||
}
|
||||
|
||||
foreach my $r (keys %typedefs) {
|
||||
my $s = $typedefs{$r};
|
||||
|
||||
$r =~ s,([\_\`\*\<\>\&\\\\:\/]),\\\\$1,g;
|
||||
|
||||
print "$r -> $s\n" if ($debug);
|
||||
$data =~ s/($start_delim)($r)$end_delim/$1$s$3/g;
|
||||
}
|
||||
|
||||
$data =~ s/\\ ([\n\s])/\1/g;
|
||||
|
||||
#
|
||||
# Generate output file
|
||||
#
|
||||
|
||||
my $title = $file_in;
|
||||
$title =~ s,.*/,,;
|
||||
|
||||
open OUT, "> $file_out" or die "Can't open $file_out";
|
||||
print OUT ".. -*- coding: utf-8; mode: rst -*-\n\n";
|
||||
print OUT "$title\n";
|
||||
print OUT "=" x length($title);
|
||||
print OUT "\n\n.. parsed-literal::\n\n";
|
||||
print OUT $data;
|
||||
close OUT;
|
||||
|
||||
__END__
|
||||
|
||||
=head1 NAME
|
||||
|
||||
parse_headers.pl - parse a C file, in order to identify functions, structs,
|
||||
enums and defines and create cross-references to a Sphinx book.
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
B<parse_headers.pl> [<options>] <C_FILE> <OUT_FILE> [<EXCEPTIONS_FILE>]
|
||||
|
||||
Where <options> can be: --debug, --help or --man.
|
||||
|
||||
=head1 OPTIONS
|
||||
|
||||
=over 8
|
||||
|
||||
=item B<--debug>
|
||||
|
||||
Put the script in verbose mode, useful for debugging.
|
||||
|
||||
=item B<--usage>
|
||||
|
||||
Prints a brief help message and exits.
|
||||
|
||||
=item B<--help>
|
||||
|
||||
Prints a more detailed help message and exits.
|
||||
|
||||
=back
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
Convert a C header or source file (C_FILE), into a ReStructured Text
|
||||
included via ..parsed-literal block with cross-references for the
|
||||
documentation files that describe the API. It accepts an optional
|
||||
EXCEPTIONS_FILE which describes what elements will be either ignored or
|
||||
be pointed to a non-default reference.
|
||||
|
||||
The output is written to OUT_FILE.
|
||||
|
||||
It is capable of identifying defines, functions, structs, typedefs,
|
||||
enums and enum symbols, and creates cross-references for all of them.
|
||||
It is also capable of distinguishing #defines used for specifying a Linux
|
||||
ioctl.
|
||||
|
||||
The EXCEPTIONS_FILE contains two kinds of rules, to either ignore a symbol or
|
||||
to replace the default references by a custom one.
|
||||
|
||||
Please read doc/doc-guide/parse-headers.rst at the Kernel's
|
||||
tree for more details.
|
||||
|
||||
=head1 BUGS
|
||||
|
||||
Report bugs to Mauro Carvalho Chehab <mchehab@kernel.org>
|
||||
|
||||
=head1 COPYRIGHT
|
||||
|
||||
Copyright (c) 2016 by Mauro Carvalho Chehab <mchehab+samsung@kernel.org>.
|
||||
|
||||
License GPLv2: GNU GPL version 2 <http://gnu.org/licenses/gpl.html>.
|
||||
|
||||
This is free software: you are free to change and redistribute it.
|
||||
There is NO WARRANTY, to the extent permitted by law.
|
||||
|
||||
=cut
|
doc/sphinx/requirements.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
docutils==0.12
Sphinx==1.4.9
sphinx_rtd_theme
doc/sphinx/rstFlatTable.py (new executable file, 376 lines)
@@ -0,0 +1,376 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8; mode: python -*-
|
||||
# pylint: disable=C0330, R0903, R0912
|
||||
|
||||
u"""
|
||||
flat-table
|
||||
~~~~~~~~~~
|
||||
|
||||
Implementation of the ``flat-table`` reST-directive.
|
||||
|
||||
:copyright: Copyright (C) 2016 Markus Heiser
|
||||
:license: GPL Version 2, June 1991 see linux/COPYING for details.
|
||||
|
||||
The ``flat-table`` (:py:class:`FlatTable`) is a double-stage list similar to
|
||||
the ``list-table`` with some additional features:
|
||||
|
||||
* *column-span*: with the role ``cspan`` a cell can be extended through
|
||||
additional columns
|
||||
|
||||
* *row-span*: with the role ``rspan`` a cell can be extended through
|
||||
additional rows
|
||||
|
||||
* *auto span* rightmost cell of a table row over the missing cells on the
|
||||
right side of that table row. With the option ``:fill-cells:`` this behavior
|
||||
can be changed from *auto span* to *auto fill*, which automatically inserts
|
||||
(empty) cells instead of spanning the last cell.
|
||||
|
||||
Options:
|
||||
|
||||
* header-rows: [int] count of header rows
|
||||
* stub-columns: [int] count of stub columns
|
||||
* widths: [[int] [int] ... ] widths of columns
|
||||
* fill-cells: instead of auto-spanning missing cells, insert missing cells
|
||||
|
||||
roles:
|
||||
|
||||
* cspan: [int] additional columns (*morecols*)
|
||||
* rspan: [int] additional rows (*morerows*)
|
||||
"""
|
||||
|
||||
# ==============================================================================
|
||||
# imports
|
||||
# ==============================================================================
|
||||
|
||||
import sys
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.parsers.rst import directives, roles
|
||||
from docutils.parsers.rst.directives.tables import Table
|
||||
from docutils.utils import SystemMessagePropagation
|
||||
|
||||
# ==============================================================================
|
||||
# common globals
|
||||
# ==============================================================================
|
||||
|
||||
# The version numbering follows numbering of the specification
|
||||
# (doc/books/kernel-doc-HOWTO).
|
||||
__version__ = '1.0'
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
PY2 = sys.version_info[0] == 2
|
||||
|
||||
if PY3:
|
||||
# pylint: disable=C0103, W0622
|
||||
unicode = str
|
||||
basestring = str
|
||||
|
||||
# ==============================================================================
|
||||
def setup(app):
|
||||
# ==============================================================================
|
||||
|
||||
app.add_directive("flat-table", FlatTable)
|
||||
roles.register_local_role('cspan', c_span)
|
||||
roles.register_local_role('rspan', r_span)
|
||||
|
||||
return dict(
|
||||
version = __version__,
|
||||
parallel_read_safe = True,
|
||||
parallel_write_safe = True
|
||||
)
|
||||
|
||||
# ==============================================================================
|
||||
def c_span(name, rawtext, text, lineno, inliner, options=None, content=None):
|
||||
# ==============================================================================
|
||||
# pylint: disable=W0613
|
||||
|
||||
options = options if options is not None else {}
|
||||
content = content if content is not None else []
|
||||
nodelist = [colSpan(span=int(text))]
|
||||
msglist = []
|
||||
return nodelist, msglist
|
||||
|
||||
# ==============================================================================
|
||||
def r_span(name, rawtext, text, lineno, inliner, options=None, content=None):
|
||||
# ==============================================================================
|
||||
# pylint: disable=W0613
|
||||
|
||||
options = options if options is not None else {}
|
||||
content = content if content is not None else []
|
||||
nodelist = [rowSpan(span=int(text))]
|
||||
msglist = []
|
||||
return nodelist, msglist
|
||||
|
||||
|
||||
# ==============================================================================
class rowSpan(nodes.General, nodes.Element): pass # pylint: disable=C0103,C0321
class colSpan(nodes.General, nodes.Element): pass # pylint: disable=C0103,C0321
# ==============================================================================

# ==============================================================================
class FlatTable(Table):
# ==============================================================================

    u"""FlatTable (``flat-table``) directive"""

    option_spec = {
        'name': directives.unchanged
        , 'class': directives.class_option
        , 'header-rows': directives.nonnegative_int
        , 'stub-columns': directives.nonnegative_int
        , 'widths': directives.positive_int_list
        , 'fill-cells' : directives.flag }

    def run(self):

        if not self.content:
            error = self.state_machine.reporter.error(
                'The "%s" directive is empty; content required.' % self.name,
                nodes.literal_block(self.block_text, self.block_text),
                line=self.lineno)
            return [error]

        title, messages = self.make_title()
        node = nodes.Element()  # anonymous container for parsing
        self.state.nested_parse(self.content, self.content_offset, node)

        tableBuilder = ListTableBuilder(self)
        tableBuilder.parseFlatTableNode(node)
        tableNode = tableBuilder.buildTableNode()
        # SDK.CONSOLE()  # print --> tableNode.asdom().toprettyxml()
        if title:
            tableNode.insert(0, title)
        return [tableNode] + messages


# ==============================================================================
class ListTableBuilder(object):
# ==============================================================================

    u"""Builds a table from a double-stage list"""

    def __init__(self, directive):
        self.directive = directive
        self.rows = []
        self.max_cols = 0

    def buildTableNode(self):

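        # Assemble the docutils node tree for the table: a ``table`` node
        # holding a ``tgroup`` with one ``colspec`` per column, an optional
        # ``thead`` for the header rows and a ``tbody`` for the rest.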
        colwidths = self.directive.get_column_widths(self.max_cols)
        if isinstance(colwidths, tuple):
            # Since docutils 0.13, get_column_widths returns a (widths,
            # colwidths) tuple, where widths is a string (i.e. 'auto').
            # See https://sourceforge.net/p/docutils/patches/120/.
            colwidths = colwidths[1]
        stub_columns = self.directive.options.get('stub-columns', 0)
        header_rows = self.directive.options.get('header-rows', 0)

        table = nodes.table()
        tgroup = nodes.tgroup(cols=len(colwidths))
        table += tgroup


        for colwidth in colwidths:
            colspec = nodes.colspec(colwidth=colwidth)
            # FIXME: It seems that the stub markup only works well in the
            # absence of rowspan (observed with the html builder; the
            # docutils-xml build seems OK). This is not surprising, because no
            # other table directive (except *this* flat-table) allows rowspan
            # and stubs to coexist, so there was no use-case before
            # flat-table. This should be reviewed (later).
            if stub_columns:
                colspec.attributes['stub'] = 1
                stub_columns -= 1
            tgroup += colspec
        stub_columns = self.directive.options.get('stub-columns', 0)

        if header_rows:
            thead = nodes.thead()
            tgroup += thead
            for row in self.rows[:header_rows]:
                thead += self.buildTableRowNode(row)

        tbody = nodes.tbody()
        tgroup += tbody

        for row in self.rows[header_rows:]:
            tbody += self.buildTableRowNode(row)
        return table

    def buildTableRowNode(self, row_data, classes=None):
        classes = [] if classes is None else classes
        row = nodes.row()
        for cell in row_data:
            if cell is None:
                continue
            cspan, rspan, cellElements = cell

            attributes = {"classes" : classes}
            if rspan:
                attributes['morerows'] = rspan
            if cspan:
                attributes['morecols'] = cspan
            entry = nodes.entry(**attributes)
            entry.extend(cellElements)
            row += entry
        return row

    def raiseError(self, msg):
        error = self.directive.state_machine.reporter.error(
            msg
            , nodes.literal_block(self.directive.block_text
                                  , self.directive.block_text)
            , line = self.directive.lineno )
        raise SystemMessagePropagation(error)

    def parseFlatTableNode(self, node):
        u"""parses the node from a :py:class:`FlatTable` directive's body"""

        if len(node) != 1 or not isinstance(node[0], nodes.bullet_list):
            self.raiseError(
                'Error parsing content block for the "%s" directive: '
                'exactly one bullet list expected.' % self.directive.name )

        for rowNum, rowItem in enumerate(node[0]):
            row = self.parseRowItem(rowItem, rowNum)
            self.rows.append(row)
        self.roundOffTableDefinition()

    def roundOffTableDefinition(self):
        u"""Round off the table definition.

        This method rounds off the table definition in :py:member:`rows`.

        * It inserts the needed ``None`` values for the missing cells arising
          from spanning cells over rows and/or columns.

        * It recounts :py:member:`max_cols`.

        * It autospans or fills (option ``fill-cells``) missing cells on the
          right side of the table-row.
        """

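        # Walk the grid: every spanning cell reserves the grid positions it
        # covers by inserting ``None`` placeholders into the current and the
        # following rows, so that the x index of later cells still maps to
        # the right column.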
        y = 0
        while y < len(self.rows):
            x = 0

            while x < len(self.rows[y]):
                cell = self.rows[y][x]
                if cell is None:
                    x += 1
                    continue
                cspan, rspan = cell[:2]
                # handle colspan in current row
                for c in range(cspan):
                    try:
                        self.rows[y].insert(x+c+1, None)
                    except: # pylint: disable=W0702
                        # the user sets ambiguous rowspans
                        pass # SDK.CONSOLE()
                # handle colspan in spanned rows
                for r in range(rspan):
                    for c in range(cspan + 1):
                        try:
                            self.rows[y+r+1].insert(x+c, None)
                        except: # pylint: disable=W0702
                            # the user sets ambiguous rowspans
                            pass # SDK.CONSOLE()
                x += 1
            y += 1

        # Insert the missing cells on the right side. For this, first
        # re-calculate the max columns.

        for row in self.rows:
            if self.max_cols < len(row):
                self.max_cols = len(row)

        # fill with empty cells or cellspan?

        fill_cells = False
        if 'fill-cells' in self.directive.options:
            fill_cells = True

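        # Auto-span: without ``fill-cells`` the last real cell of a short row
        # is widened over the missing columns; with ``fill-cells`` empty
        # cells are appended instead.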
        for row in self.rows:
            x = self.max_cols - len(row)
            if x and not fill_cells:
                if row[-1] is None:
                    row.append( ( x - 1, 0, []) )
                else:
                    cspan, rspan, content = row[-1]
                    row[-1] = (cspan + x, rspan, content)
            elif x and fill_cells:
                for i in range(x):
                    row.append( (0, 0, nodes.comment()) )

    def pprint(self):
        # for debugging
        retVal = "[ "
        for row in self.rows:
            retVal += "[ "
            for col in row:
                if col is None:
                    retVal += ('%r' % col)
                    retVal += "\n , "
                else:
                    content = col[2][0].astext()
                    if len(content) > 30:
                        content = content[:30] + "..."
                    retVal += ('(cspan=%s, rspan=%s, %r)'
                               % (col[0], col[1], content))
                    retVal += "]\n , "
            retVal = retVal[:-2]
            retVal += "]\n , "
        retVal = retVal[:-2]
        return retVal + "]"

    def parseRowItem(self, rowItem, rowNum):
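        # A row item is a first-level list item: apart from optional targets,
        # comments and system messages it must contain exactly one nested
        # bullet list whose items are the row's cells.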
        row = []
        childNo = 0
        error = False
        cell = None
        target = None

        for child in rowItem:
            if (isinstance(child, nodes.comment)
                or isinstance(child, nodes.system_message)):
                pass
            elif isinstance(child, nodes.target):
                target = child
            elif isinstance(child, nodes.bullet_list):
                childNo += 1
                cell = child
            else:
                error = True
                break

        if childNo != 1 or error:
            self.raiseError(
                'Error parsing content block for the "%s" directive: '
                'two-level bullet list expected, but row %s does not '
                'contain a second-level bullet list.'
                % (self.directive.name, rowNum + 1))

        for cellItem in cell:
            cspan, rspan, cellElements = self.parseCellItem(cellItem)
            if target is not None:
                cellElements.insert(0, target)
            row.append( (cspan, rspan, cellElements) )
        return row

    def parseCellItem(self, cellItem):
        # Search for a cspan / rspan colspec in the first element of this
        # list item (field) and remove it.
        cspan = rspan = 0
        if not len(cellItem):
            return cspan, rspan, []
        for elem in cellItem[0]:
            if isinstance(elem, colSpan):
                cspan = elem.get("span")
                elem.parent.remove(elem)
                continue
            if isinstance(elem, rowSpan):
                rspan = elem.get("span")
                elem.parent.remove(elem)
                continue
        return cspan, rspan, cellItem[:]