mirror of
https://github.com/torvalds/linux.git
synced 2024-12-26 12:52:30 +00:00
Documentation: Document each netlink family
This is a simple script that parses the Netlink YAML spec files (Documentation/netlink/specs/), and generates RST files to be rendered in the Network -> Netlink Specification documentation page. Create a python script that is invoked during 'make htmldocs', reads the YAML specs input file and generates the corresponding RST file. Create a new Documentation/networking/netlink_spec index page, and reference each Netlink RST file that was processed above in this main index.rst file. In case of any exception during the parsing, dump the error and skip the file. Do not regenerate the RST files if the input files (YAML) were not changed in-between invocations. Suggested-by: Jakub Kicinski <kuba@kernel.org> Signed-off-by: Breno Leitao <leitao@debian.org> ---- Changelog: V3: * Do not regenerate the RST files if the input files were not changed. In order to do it, a few things changed: - Rely on Makefile more to find what changed, and trigger individual file processing - The script parses file by file now (instead of batches) - Create a new option to generate the index file V2: * Moved the logic from a sphinx extension to an external script * Adjust some formatting as suggested by Donald Hunter and Jakub * Auto generating all the rsts instead of having stubs * Handling error gracefully Reviewed-by: Jakub Kicinski <kuba@kernel.org> Signed-off-by: David S. Miller <davem@davemloft.net>
This commit is contained in:
parent
45c226dde7
commit
f061c9f7d0
@ -97,7 +97,21 @@ quiet_cmd_sphinx = SPHINX $@ --> file://$(abspath $(BUILDDIR)/$3/$4)
|
||||
cp $(if $(patsubst /%,,$(DOCS_CSS)),$(abspath $(srctree)/$(DOCS_CSS)),$(DOCS_CSS)) $(BUILDDIR)/$3/_static/; \
|
||||
fi
|
||||
|
||||
htmldocs:
|
||||
YNL_INDEX:=$(srctree)/Documentation/networking/netlink_spec/index.rst
|
||||
YNL_RST_DIR:=$(srctree)/Documentation/networking/netlink_spec
|
||||
YNL_YAML_DIR:=$(srctree)/Documentation/netlink/specs
|
||||
YNL_TOOL:=$(srctree)/tools/net/ynl/ynl-gen-rst.py
|
||||
|
||||
YNL_RST_FILES_TMP := $(patsubst %.yaml,%.rst,$(wildcard $(YNL_YAML_DIR)/*.yaml))
|
||||
YNL_RST_FILES := $(patsubst $(YNL_YAML_DIR)%,$(YNL_RST_DIR)%, $(YNL_RST_FILES_TMP))
|
||||
|
||||
$(YNL_INDEX): $(YNL_RST_FILES)
|
||||
@$(YNL_TOOL) -o $@ -x
|
||||
|
||||
$(YNL_RST_DIR)/%.rst: $(YNL_YAML_DIR)/%.yaml
|
||||
@$(YNL_TOOL) -i $< -o $@
|
||||
|
||||
htmldocs: $(YNL_INDEX)
|
||||
@$(srctree)/scripts/sphinx-pre-install --version-check
|
||||
@+$(foreach var,$(SPHINXDIRS),$(call loop_cmd,sphinx,html,$(var),,$(var)))
|
||||
|
||||
|
@ -55,6 +55,7 @@ Contents:
|
||||
filter
|
||||
generic-hdlc
|
||||
generic_netlink
|
||||
netlink_spec/index
|
||||
gen_stats
|
||||
gtp
|
||||
ila
|
||||
|
1
Documentation/networking/netlink_spec/.gitignore
vendored
Normal file
1
Documentation/networking/netlink_spec/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
*.rst
|
4
Documentation/networking/netlink_spec/readme.txt
Normal file
4
Documentation/networking/netlink_spec/readme.txt
Normal file
@ -0,0 +1,4 @@
|
||||
SPDX-License-Identifier: GPL-2.0
|
||||
|
||||
This file is populated during the build of the documentation (htmldocs) by the
|
||||
tools/net/ynl/ynl-gen-rst.py script.
|
388
tools/net/ynl/ynl-gen-rst.py
Executable file
388
tools/net/ynl/ynl-gen-rst.py
Executable file
@ -0,0 +1,388 @@
|
||||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0
|
||||
# -*- coding: utf-8; mode: python -*-
|
||||
|
||||
"""
|
||||
Script to auto generate the documentation for Netlink specifications.
|
||||
|
||||
:copyright: Copyright (C) 2023 Breno Leitao <leitao@debian.org>
|
||||
:license: GPL Version 2, June 1991 see linux/COPYING for details.
|
||||
|
||||
This script performs extensive parsing to the Linux kernel's netlink YAML
|
||||
spec files, in an effort to avoid needing to heavily mark up the original
|
||||
YAML file.
|
||||
|
||||
This code is split in three big parts:
|
||||
1) RST formatters: Use to convert a string to a RST output
|
||||
2) Parser helpers: Functions to parse the YAML data structure
|
||||
3) Main function and small helpers
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, List
import argparse
import logging
import os.path
import re
import sys

import yaml
|
||||
|
||||
|
||||
# Number of spaces added per indentation level in the generated RST output.
SPACE_PER_LEVEL = 4
|
||||
|
||||
|
||||
# RST Formatters
|
||||
# ==============
|
||||
def headroom(level: int) -> str:
    """Build the leading whitespace for the given indentation level."""
    return level * SPACE_PER_LEVEL * " "
|
||||
|
||||
|
||||
def bold(text: str) -> str:
    """Wrap *text* in RST strong-emphasis markers."""
    return "**" + text + "**"
|
||||
|
||||
|
||||
def inline(text: str) -> str:
    """Wrap *text* in RST inline-literal markers."""
    return "``" + text + "``"
|
||||
|
||||
|
||||
def sanitize(text: str) -> str:
    """Collapse all whitespace runs (including newlines) to single spaces.

    Useful for fields that are spread across multiple lines in the YAML
    spec; simply deleting the newlines (the previous behaviour) glued
    the last word of one line to the first word of the next.
    """
    # \s+ also matches "\n", so a multi-line value becomes one clean line
    # and any run of blanks collapses to a single space.
    return re.sub(r"\s+", " ", str(text)).strip()
|
||||
|
||||
|
||||
def rst_fields(key: str, value: str, level: int = 0) -> str:
    """Render one RST field-list entry at the given indentation level."""
    return f"{headroom(level)}:{key}: {value}"
|
||||
|
||||
|
||||
def rst_definition(key: str, value: Any, level: int = 0) -> str:
    """Render an RST definition-list item: the term, then the indented value."""
    term = headroom(level) + key
    body = headroom(level + 1) + str(value)
    return term + "\n" + body
|
||||
|
||||
|
||||
def rst_paragraph(paragraph: str, level: int = 0) -> str:
    """Indent *paragraph* to the requested level."""
    return f"{headroom(level)}{paragraph}"
|
||||
|
||||
|
||||
def rst_bullet(item: str, level: int = 0) -> str:
    """Render a single RST bullet-list item."""
    return f"{headroom(level)} - {item}"
|
||||
|
||||
|
||||
def rst_subsection(title: str) -> str:
    """Render an RST sub-section heading (underlined with '-')."""
    underline = "-" * len(title)
    return f"{title}\n{underline}"
|
||||
|
||||
|
||||
def rst_subsubsection(title: str) -> str:
    """Render an RST sub-sub-section heading (underlined with '~')."""
    underline = "~" * len(title)
    return f"{title}\n{underline}"
|
||||
|
||||
|
||||
def rst_section(title: str) -> str:
    """Render an RST section heading (underlined with '='), preceded by a blank line."""
    return "\n" + title + "\n" + "=" * len(title)
|
||||
|
||||
|
||||
def rst_subtitle(title: str) -> str:
    """Render an RST subtitle: the title over- and under-lined with '-'."""
    bar = "-" * len(title)
    return f"\n{bar}\n{title}\n{bar}\n\n"
|
||||
|
||||
|
||||
def rst_title(title: str) -> str:
    """Render the document title: over- and under-lined with '='."""
    bar = "=" * len(title)
    return f"{bar}\n{title}\n{bar}\n\n"
|
||||
|
||||
|
||||
def rst_list_inline(list_: List[str], level: int = 0) -> str:
    """Render *list_* as a bracketed row of inline literals."""
    body = ", ".join(inline(entry) for entry in list_)
    return f"{headroom(level)}[{body}]"
|
||||
|
||||
|
||||
def rst_header() -> str:
    """Return the common header emitted at the top of every generated RST file."""
    header = [
        rst_paragraph(".. SPDX-License-Identifier: GPL-2.0"),
        rst_paragraph(".. NOTE: This document was auto-generated.\n\n"),
    ]
    return "\n".join(header)
|
||||
|
||||
|
||||
def rst_toctree(maxdepth: int = 2) -> str:
    """Return an RST toctree directive with the given maximum depth."""
    return "\n".join([".. toctree::", f" :maxdepth: {maxdepth}\n\n"])
|
||||
|
||||
|
||||
# Parsers
|
||||
# =======
|
||||
|
||||
|
||||
def parse_mcast_group(mcast_group: List[Dict[str, Any]]) -> str:
    """Render the multicast-groups list as one RST bullet per group name."""
    return "\n".join(rst_bullet(group["name"]) for group in mcast_group)
|
||||
|
||||
|
||||
def parse_do(do_dict: Dict[str, Any], level: int = 0) -> str:
    """Render a 'do'/'dump' operation block as RST."""
    lines = []
    for name, spec in do_dict.items():
        lines.append(rst_paragraph(bold(name), level + 1))
        # Each request/reply carries its own attribute list one level deeper.
        lines.append(parse_do_attributes(spec, level + 1) + "\n")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_do_attributes(attrs: Dict[str, Any], level: int = 0) -> str:
    """Render the 'attributes' list of a request/reply, if present."""
    if "attributes" not in attrs:
        return ""
    return rst_fields("attributes", rst_list_inline(attrs["attributes"]), level + 1)
|
||||
|
||||
|
||||
def parse_operations(operations: List[Dict[str, Any]]) -> str:
    """Render the operations block: one RST section per operation."""
    # Keys rendered specially below instead of as generic field entries.
    preprocessed = ["name", "doc", "title", "do", "dump"]
    lines = []

    for operation in operations:
        lines.append(rst_section(operation["name"]))
        lines.append(rst_paragraph(sanitize(operation["doc"])) + "\n")

        # Everything else becomes a plain field-list entry.
        for key, value in operation.items():
            if key in preprocessed:
                continue
            lines.append(rst_fields(key, value, 0))

        if "do" in operation:
            lines.append(rst_paragraph(":do:", 0))
            lines.append(parse_do(operation["do"], 0))
        if "dump" in operation:
            lines.append(rst_paragraph(":dump:", 0))
            lines.append(parse_do(operation["dump"], 0))

        # Field lists must be terminated by a blank line.
        lines.append("\n")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_entries(entries: List[Dict[str, Any]], level: int) -> str:
    """Render an 'entries'/'members' list as RST."""
    lines = []
    for entry in entries:
        # An entry may be a dict (name/doc pair), a list, or a plain scalar.
        if isinstance(entry, dict):
            name = entry.get("name", "")
            doc = sanitize(entry.get("doc", ""))
            lines.append(rst_fields(name, doc, level))
        elif isinstance(entry, list):
            lines.append(rst_list_inline(entry, level))
        else:
            lines.append(rst_bullet(inline(sanitize(entry)), level))

    lines.append("\n")
    return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_definitions(defs: List[Dict[str, Any]]) -> str:
    """Render the 'definitions' section: one RST section per definition.

    Each definition's scalar fields become a field list; its 'entries'
    and 'members' lists (when present) are rendered via parse_entries().
    """
    # Keys rendered specially below instead of as generic field entries.
    preprocessed = ["name", "entries", "members"]
    ignored = ["render-max"]  # This is not printed
    lines = []

    for definition in defs:
        lines.append(rst_section(definition["name"]))
        for k in definition.keys():
            if k in preprocessed + ignored:
                continue
            lines.append(rst_fields(k, sanitize(definition[k]), 0))

        # Field list needs to finish with a new line
        lines.append("\n")
        if "entries" in definition:
            lines.append(rst_paragraph(":entries:", 0))
            lines.append(parse_entries(definition["entries"], 1))
        if "members" in definition:
            lines.append(rst_paragraph(":members:", 0))
            lines.append(parse_entries(definition["members"], 1))

    return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_attr_sets(entries: List[Dict[str, Any]]) -> str:
    """Render the attribute-sets block: a section per set, a heading per attribute."""
    # Keys rendered specially (or deliberately skipped) below.
    preprocessed = ["name", "type"]
    ignored = ["checks"]
    lines = []

    for entry in entries:
        lines.append(rst_section(entry["name"]))
        for attr in entry["attributes"]:
            type_ = attr.get("type")
            attr_line = bold(attr["name"])
            if type_:
                # Show the attribute type on the heading line itself.
                attr_line += f" ({inline(type_)})"

            lines.append(rst_subsubsection(attr_line))

            for key, value in attr.items():
                if key in preprocessed + ignored:
                    continue
                lines.append(rst_fields(key, sanitize(value), 2))
            lines.append("\n")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_yaml(obj: Dict[str, Any]) -> str:
    """Format the whole YAML spec into a single RST document string.

    Emits, in order: the common header and title, then — only when the
    corresponding key exists in the spec — the summary, operations,
    multicast groups, definitions, and attribute sets.
    """
    lines = []

    # Main header

    lines.append(rst_header())

    title = f"Family ``{obj['name']}`` netlink specification"
    lines.append(rst_title(title))
    lines.append(rst_paragraph(".. contents::\n"))

    if "doc" in obj:
        lines.append(rst_subtitle("Summary"))
        lines.append(rst_paragraph(obj["doc"], 0))

    # Operations
    if "operations" in obj:
        lines.append(rst_subtitle("Operations"))
        lines.append(parse_operations(obj["operations"]["list"]))

    # Multicast groups
    if "mcast-groups" in obj:
        lines.append(rst_subtitle("Multicast groups"))
        lines.append(parse_mcast_group(obj["mcast-groups"]["list"]))

    # Definitions
    if "definitions" in obj:
        lines.append(rst_subtitle("Definitions"))
        lines.append(parse_definitions(obj["definitions"]))

    # Attributes set
    if "attribute-sets" in obj:
        lines.append(rst_subtitle("Attribute sets"))
        lines.append(parse_attr_sets(obj["attribute-sets"]))

    return "\n".join(lines)
|
||||
|
||||
|
||||
# Main functions
|
||||
# ==============
|
||||
|
||||
|
||||
def parse_arguments() -> argparse.Namespace:
    """Parse and validate the command line arguments.

    Exits the process when the input file does not exist or when no
    output file was specified.
    """
    parser = argparse.ArgumentParser(description="Netlink RST generator")

    parser.add_argument("-v", "--verbose", action="store_true")
    parser.add_argument("-o", "--output", help="Output file name")

    # Index and input are mutually exclusive
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "-x", "--index", action="store_true", help="Generate the index page"
    )
    group.add_argument("-i", "--input", help="YAML file name")

    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    # Fail fast on an unreadable input file.
    if args.input and not os.path.isfile(args.input):
        logging.warning("%s is not a valid file.", args.input)
        sys.exit(-1)

    # An output path is mandatory for both modes (-i and -x).
    if not args.output:
        logging.error("No output file specified.")
        sys.exit(-1)

    if os.path.isfile(args.output):
        logging.debug("%s already exists. Overwriting it.", args.output)

    return args
|
||||
|
||||
|
||||
def parse_yaml_file(filename: str) -> str:
    """Load the YAML spec at *filename* and render it as an RST-formatted string."""
    with open(filename, "r", encoding="utf-8") as spec_file:
        spec = yaml.safe_load(spec_file)
    return parse_yaml(spec)
|
||||
|
||||
|
||||
def write_to_rstfile(content: str, filename: str) -> None:
    """Dump the generated RST *content* into *filename*, overwriting it."""
    logging.debug("Saving RST file to %s", filename)
    with open(filename, "w", encoding="utf-8") as rst_file:
        rst_file.write(content)
|
||||
|
||||
|
||||
def generate_main_index_rst(output: str) -> None:
    """Generate the `netlink_spec/index` content and write it to *output*.

    Scans the directory containing *output* for previously generated
    per-family ``.rst`` files and lists each one in a toctree.
    """
    lines = []

    lines.append(rst_header())
    lines.append(rst_title("Netlink Specification"))
    lines.append(rst_toctree(1))

    index_dir = os.path.dirname(output)
    logging.debug("Looking for .rst files in %s", index_dir)
    # Sort the listing: os.listdir() returns entries in arbitrary order,
    # which would make the generated index non-deterministic across runs.
    for filename in sorted(os.listdir(index_dir)):
        if not filename.endswith(".rst") or filename == "index.rst":
            continue
        lines.append(f" {filename.replace('.rst', '')}\n")

    logging.debug("Writing an index file at %s", output)
    write_to_rstfile("".join(lines), output)
|
||||
|
||||
|
||||
def main() -> None:
    """Entry point: render one YAML spec and/or the main index RST file."""
    args = parse_arguments()

    if args.input:
        logging.debug("Parsing %s", args.input)
        try:
            content = parse_yaml_file(args.input)
        except Exception as exception:
            # Dump the error and bail out; the Makefile skips this file.
            logging.warning("Failed to parse %s.", args.input)
            logging.warning(exception)
            sys.exit(-1)

        write_to_rstfile(content, args.output)

    if args.index:
        # Generate the index RST file
        generate_main_index_rst(args.output)
|
||||
|
||||
|
||||
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
Loading…
Reference in New Issue
Block a user