Merge pull request #22083 from edx/nedbat/sphinx-openapi
Use sphinxcontrib-openapi to do the heavy lifting of Open API rendering.
This commit is contained in:
6
Makefile
6
Makefile
@@ -26,9 +26,11 @@ docs: api-docs guides ## build all the developer documentation for this reposito
|
||||
swagger: ## generate the swagger.yaml file
|
||||
DJANGO_SETTINGS_MODULE=docs.docs_settings python manage.py lms generate_swagger --generator-class=openedx.core.apidocs.ApiSchemaGenerator -o $(SWAGGER)
|
||||
|
||||
api-docs: swagger ## build the REST api docs
|
||||
api-docs-sphinx: swagger ## generate the sphinx source files for api-docs
|
||||
rm -f docs/api/gen/*
|
||||
python docs/sw2md.py $(SWAGGER) docs/api/gen
|
||||
python docs/sw2sphinxopenapi.py $(SWAGGER) docs/api/gen
|
||||
|
||||
api-docs: api-docs-sphinx ## build the REST api docs
|
||||
cd docs/api; make html
|
||||
|
||||
guides: ## build the developer guide docs
|
||||
|
||||
@@ -45,8 +45,7 @@ release = u''
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'recommonmark',
|
||||
'sphinx.ext.autosectionlabel',
|
||||
'sphinxcontrib.openapi',
|
||||
]
|
||||
|
||||
# Prefix document path to section labels, otherwise autogenerated labels would look like 'heading'
|
||||
@@ -59,7 +58,7 @@ templates_path = ['_templates']
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
#
|
||||
source_suffix = ['.rst', '.md']
|
||||
source_suffix = ['.rst']
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
@@ -7,8 +7,6 @@ TODO: What should go here?
|
||||
See all the endpoints at :doc:`The Endpoints <gen/index>`.
|
||||
|
||||
.. toctree::
|
||||
:glob:
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
gen/index
|
||||
gen/*
|
||||
|
||||
333
docs/sw2md.py
333
docs/sw2md.py
@@ -1,333 +0,0 @@
|
||||
"""Generate Markdown documents from an OpenAPI swagger file."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import functools
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
# JSON Reference helpers
|
||||
|
||||
class JRefable(object):
    """An object that can be indexed with JSON Pointers, and supports $ref."""
    def __init__(self, data, doc=None, ref=None):
        # data: the raw fragment (dict/list) this object wraps.
        # doc: the root document, used to resolve "#/..." references;
        #      defaults to `data`, i.e. this fragment *is* the root.
        # ref: the JSON Pointer path of this fragment within the document.
        self.data = data
        self.doc = doc or data
        self.ref = ref or '/'
        # Set to the last path component of a "$ref" when this object was
        # reached through a reference (see `wrap`); otherwise None.
        self.name = None

    def __repr__(self):
        return repr(self.data)

    def wrap(self, data, ref):
        """Wrap `data` (found at pointer `ref`) in the appropriate JRefable.

        Dicts containing "$ref" are resolved against the root document, and
        the resolved object remembers the reference's short name in `.name`.
        Scalars are returned unwrapped.
        """
        if isinstance(data, dict):
            if '$ref' in data:
                ref = data['$ref']
                # Resolve the reference against the whole document.
                ret = JRefableObject(self.doc)[ref]
                ret.name = ref.split('/')[-1]
                return ret
            return JRefableObject(data, self.doc, ref)
        if isinstance(data, list):
            return JRefableArray(data, self.doc, ref)
        return data


class JRefableObject(JRefable):
    """Make a dictionary into a JSON Reference-capable object."""
    def __getitem__(self, jref):
        # A pointer starting with "#/" is absolute (relative to the root
        # document); anything else is relative to this fragment.
        if jref.startswith('#/'):
            parts = jref[2:]
            data = self.doc
            ref = '/'
        else:
            parts = jref
            data = self.data
            ref = self.ref
        # Walk the pointer one component at a time, tracking the path so
        # that error messages and `.ref` stay meaningful.
        for part in parts.split('/'):
            try:
                data = data[part]
            except KeyError:
                raise KeyError("{!r} not in {!r} then {!r}".format(part, self.ref, jref))
            ref = ref + part + '/'
        return self.wrap(data, ref=ref)

    def get(self, key, default=None):
        """Like dict.get, but the found value is JRefable-wrapped."""
        if key in self.data:
            return self.wrap(self.data[key], self.ref + key + '/')
        return default

    def keys(self):
        return self.data.keys()

    def items(self):
        # Yield (key, wrapped-value) pairs; '/' in keys is escaped as ':'
        # so the recorded pointer path stays unambiguous.
        for k, v in self.data.items():
            yield k, self.wrap(v, self.ref + k.replace('/', ':') + '/')

    def __contains__(self, val):
        return val in self.data


class JRefableArray(JRefable):
    """Make a list into a JSON Reference-capable array."""
    def __getitem__(self, index):
        try:
            data = self.data[index]
        except IndexError:
            raise IndexError("{!r} not in {!r}".format(index, self.ref))
        return self.wrap(data, self.ref + str(index) + '/')

    def __iter__(self):
        # Elements are wrapped lazily, each tagged with its index path.
        for i, elt in enumerate(self.data):
            yield self.wrap(elt, self.ref + str(i) + '/')
|
||||
|
||||
|
||||
class OutputFiles(object):
    """A context manager that keeps at most one output file open at a time.

    Use like this::

        with OutputFiles() as outfiles:
            ...
            if some_condition():
                f = outfiles.open("filename.txt", "w")

    Each call to `open` closes the previously opened file, and leaving the
    with statement closes the last one.

    """
    def __init__(self):
        self.file = None

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        self._close_current()
        return False

    def open(self, *args, **kwargs):
        """Open a new file (same arguments as builtin open), closing the old one."""
        self._close_current()
        self.file = open(*args, **kwargs)
        return self.file

    def _close_current(self):
        # Close the file we are currently holding, if any.
        if self.file:
            self.file.close()
|
||||
|
||||
|
||||
# Regexes that pick a "slug" (grouping key) out of an endpoint URI.  The
# first one that matches wins; the second is a broad fallback.
sluggers = [
    r"^.*?/v\d+/[\w_-]+",
    r"^(/[\w_-]+){,3}",
]

# Canonical ordering of HTTP methods in the generated docs.
method_order = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options']


def method_ordered_items(method_data):
    """Yield (method, value) pairs from `method_data` in canonical HTTP-method order."""
    for method in method_order:
        if method in method_data:
            yield method, method_data[method]
|
||||
|
||||
|
||||
class MarkdownWriter(object):
    """Help write Markdown, managing indentation and header nesting."""

    def __init__(self, outfile):
        self.outfile = outfile
        self.cur_indent = 0

    def print(self, text='', increase_headers=0):
        """Write `text` (plus a trailing newline) to the output file.

        If `increase_headers` is non-zero, the leading ``#`` of every
        Markdown header line in `text` is deepened by that many levels.
        The current indentation is then applied to every line.
        """
        if increase_headers:
            deeper = "#" * (increase_headers + 1)
            text = re.sub(r"^#", deeper, text, flags=re.MULTILINE)
        if self.cur_indent:
            prefix = " " * self.cur_indent
            text = re.sub(r"^", prefix, text, flags=re.MULTILINE)
        print(text, file=self.outfile)

    @contextlib.contextmanager
    def indent(self, spaces):
        """Context manager: indent output by `spaces` more spaces inside the block."""
        saved = self.cur_indent
        self.cur_indent = saved + spaces
        try:
            yield
        finally:
            self.cur_indent = saved
|
||||
|
||||
|
||||
def convert_swagger_to_markdown(swagger_data, output_dir):
    """Convert a swagger.yaml file to a series of markdown documents.

    Args:
        swagger_data: the parsed swagger document (a dict).
        output_dir: the directory where the .md files should be written.

    Writes an index.md listing every endpoint, plus one .md file per URI
    slug (group of related endpoints) with the endpoint details.
    """
    sw = JRefableObject(swagger_data)

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(os.path.join(output_dir, 'index.md'), 'w') as index:
        indexmd = MarkdownWriter(index)
        indexmd.print("# {}\n".format(sw['info/title']))
        indexmd.print(sw['info/description'])
        indexmd.print()

        with OutputFiles() as outfiles:
            slug = None

            for uri, methods in sorted(sw['paths'].items()):
                # Find the slug (grouping key) for this URI; start a new
                # output file whenever the slug changes.
                for slugger in sluggers:
                    m = re.search(slugger, uri)
                    if m:
                        new_slug = m.group()
                        if new_slug != slug:
                            slug = new_slug
                            outfile = slug.strip('/').replace('/', '_') + '.md'
                            outf = outfiles.open(os.path.join(output_dir, outfile), 'w')
                            outmd = MarkdownWriter(outf)
                            outmd.print("# {}\n".format(slug))
                            indexmd.print("## {}\n".format(slug))
                        break

                # NOTE(review): `outfile`/`outmd` being bound here relies on
                # the fallback slugger matching every URI (it can match an
                # empty string) — confirm if the slugger list ever changes.
                common_params = methods.get('parameters', [])
                for method, op_data in method_ordered_items(methods):
                    summary = ''
                    if 'summary' in op_data:
                        summary = " --- {}".format(op_data['summary'])
                    # Link from the index to the detail file, then write the
                    # detail entry itself.
                    indexmd.print("[{} {}]({}){}\n".format(method.upper(), uri, outfile, summary))
                    write_one_method(outmd, method, uri, op_data, common_params)
|
||||
|
||||
|
||||
def write_one_method(outmd, method, uri, op_data, common_params):
    """Write one entry (uri and method) to the markdown output.

    Args:
        outmd: a MarkdownWriter for the detail file.
        method: the HTTP method name (lowercase).
        uri: the endpoint URI.
        op_data: the swagger operation object for this method.
        common_params: parameters shared by all methods of this URI.
    """
    outmd.print("\n## {} {}\n".format(method.upper(), uri))
    if 'summary' in op_data:
        outmd.print(op_data['summary'])
        outmd.print()
    # NOTE(review): 'description' is read unconditionally, unlike 'summary'
    # above — this will raise if an operation has no description; presumably
    # the swagger generator always emits one.  TODO confirm.
    outmd.print(op_data['description'], increase_headers=2)

    # Per-operation parameters first, then the URI-wide common ones.
    params = list(op_data.get('parameters', []))
    params.extend(common_params)
    if params:
        outmd.print("\n### Parameters\n")
        for param in params:
            description = param.get('description', '').strip()
            if description:
                description = ": " + description
            where = param['in']
            required = param.get('required', False)
            required = "required" if required else "optional"
            if where == 'body':
                # Body parameters are rendered with their schema expanded.
                # NOTE(review): `required` is not shown in this branch,
                # only in the non-body branch below — intentional?
                schema = param['schema']
                outmd.print("- **{}** (body, {}){}".format(
                    param['name'],
                    schema.name or schema['type'],
                    description,
                ))
                with outmd.indent(2):
                    write_schema(outmd, schema)
            else:
                outmd.print("- **{}** ({}, {}, {}){}".format(
                    param['name'],
                    where,
                    param['type'],
                    required,
                    description,
                ))

    responses = op_data.get('responses', [])
    if responses:
        outmd.print("\n### Responses\n")
        # Sorted by status code so the output order is stable.
        for status, response in sorted(responses.items()):
            description = response.get('description', '').strip()
            if description:
                description = ": " + description
            schema = response.get('schema')
            if schema:
                type_note = " ({})".format(type_name(schema))
            else:
                type_note = ""
            outmd.print("- **{}**{}{}".format(
                status,
                type_note,
                description,
            ))
            if schema:
                with outmd.indent(2):
                    write_schema(outmd, schema)
|
||||
|
||||
|
||||
def type_name(schema):
    """Return a short human-readable type name for `schema`."""
    kind = schema['type']
    if kind == 'object':
        # Prefer the $ref short name if this schema came via a reference.
        return schema.name or schema.get('type') or "object"
    if kind == 'array':
        return "array of " + type_name(schema['items'])
    return kind
|
||||
|
||||
|
||||
def write_schema(outmd, schema):
    """Write a schema to the markdown output.

    Args:
        outmd: a MarkdownWriter to write to.
        schema: the (JRefable-wrapped) swagger schema to render.

    Raises:
        ValueError: if the schema's type is neither 'object' nor 'array'.
    """
    if schema['type'] == 'object':
        required = set(schema.get('required', ()))
        for prop_name, prop in sorted(schema['properties'].items()):
            attrs = []
            # Renamed from `type` to avoid shadowing the builtin.
            prop_type = type_name(prop)
            if prop['type'] == 'array':
                item_type = prop['items']
            else:
                item_type = None
            attrs.append(prop_type)
            if prop_name in required:
                attrs.append("required")
            else:
                attrs.append("optional")
            if 'format' in prop:
                attrs.append("format {}".format(prop["format"]))
            if 'pattern' in prop:
                attrs.append("pattern `{}`".format(prop["pattern"]))
            if 'minLength' in prop:
                attrs.append("min length {}".format(prop["minLength"]))
            if 'maxLength' in prop:
                attrs.append("max length {}".format(prop["maxLength"]))
            if 'minimum' in prop:
                attrs.append("minimum {}".format(prop["minimum"]))
            if 'maximum' in prop:
                attrs.append("maximum {}".format(prop["maximum"]))
            if prop.get('readOnly', False):
                attrs.append("read only")
            # TODO: enum
            # TODO: x-nullable

            # Combine the title and description into one trailing note.
            title = prop.get('title', '').strip()
            if title:
                title = ": " + title
            description = prop.get('description', '').strip()
            if description:
                if title:
                    title = title + ". " + description
                else:
                    title = ": " + description

            outmd.print("- **{name}** ({attrs}){title}".format(
                name=prop_name,
                attrs=", ".join(attrs),
                title=title,
            ))
            # Only compound item types need their own nested rendering.
            if item_type and item_type['type'] in ['object', 'array']:
                with outmd.indent(2):
                    write_schema(outmd, item_type)
    elif schema['type'] == 'array':
        write_schema(outmd, schema['items'])
    else:
        raise ValueError("Don't understand schema type {!r} at {}".format(schema['type'], schema.ref))
|
||||
|
||||
|
||||
def main(args):
    """Command-line entry point.

    Args:
        args: [swagger_file, output_dir]
    """
    swagger_file = args[0]
    output_dir = args[1]
    with open(swagger_file) as swyaml:
        swagger_data = yaml.safe_load(swyaml)
    convert_swagger_to_markdown(swagger_data, output_dir=output_dir)


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
|
||||
127
docs/sw2sphinxopenapi.py
Normal file
127
docs/sw2sphinxopenapi.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""Generate ReST documents for sphinxcontrib-openapi from an OpenAPI swagger file.
|
||||
|
||||
This program reads an OpenAPI swagger file, and generates .rst files. Each
|
||||
file will render a segment of the swagger file, using sphinxcontrib-openapi.
|
||||
|
||||
An index.rst file is created listing all of the endpoints, linking to their
|
||||
detailed segment page.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
def method_ordered_items(method_data):
    """Yield the HTTP method items from method_data, in a canonical order."""
    canonical = ('get', 'post', 'put', 'patch', 'delete', 'head', 'options')
    for method in canonical:
        if method in method_data:
            yield method, method_data[method]
|
||||
|
||||
|
||||
def rst_header(text, level, anchor=None):
    """Create a ReST header, including a possible anchor.

    Returns a multi-line string.

    """
    lines = []
    if anchor:
        lines.append(".. _{}:".format(anchor))
        lines.append("")
    # Underline characters by level: 1 -> '#', 2 -> '=', 3 -> '-'.
    underline = " #=-"[level] * len(text)
    if level == 1:
        # Top-level titles get an overline as well as an underline.
        lines.append(underline)
    lines.append(text)
    lines.append(underline)
    lines.append("")
    return "\n".join(lines)
|
||||
|
||||
|
||||
# Regexes that determine the segments. If one of these matches a URI, the
# matched text is the segment for that endpoint.
SEGMENTERS = [
    r"^.*?/v\d+/[\w_-]+",
    r"^(/[\w_-]+){,3}",
]


def segment_for_uri(uri):
    """Determine the segment for an endpoint's URI."""
    for pattern in SEGMENTERS:
        match = re.search(pattern, uri)
        if match is not None:
            return match.group()

    # In practice the fallback regex above matches any string (possibly as
    # an empty match), so this is a belt-and-braces default.
    return "default"
|
||||
|
||||
|
||||
def convert_swagger_to_sphinx(swagger_file, output_dir):
    """Convert a swagger.yaml file to a series of Sphinx documents.

    Args:
        swagger_file: the filename of the OpenAPI swagger file to read.
        output_dir: the directory where the .rst files should be written.

    """
    with open(swagger_file) as swyaml:
        swagger = yaml.safe_load(swyaml)

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # The generated .rst files refer back to the swagger file, so compute
    # its path relative to where they will live.
    rel_swagger_path = os.path.relpath(swagger_file, output_dir)

    with open(os.path.join(output_dir, 'index.rst'), 'w') as index:
        pr_index = functools.partial(print, file=index)
        pr_index(rst_header(swagger['info']['title'], level=1))
        pr_index(swagger['info']['description'])
        pr_index(textwrap.dedent("""\

            .. toctree::
                :glob:
                :hidden:

                *
            """))

        segment = None

        # Group the sorted URIs by segment; each segment gets its own page.
        uris = sorted(swagger['paths'])
        for segment, segment_uris in itertools.groupby(uris, key=segment_for_uri):

            outfile = segment.strip('/').replace('/', '_')
            with open(os.path.join(output_dir, outfile + '.rst'), 'w') as outf:
                pr_outf = functools.partial(print, file=outf)
                pr_outf(rst_header(segment, level=1, anchor="gen_" + outfile))
                # The openapi directive renders only this segment's paths,
                # selected by the :include: glob below.
                pr_outf(".. openapi:: {}".format(rel_swagger_path))
                pr_outf("   :format: markdown")
                pr_outf("   :include:")
                pr_outf("      {}.*".format(segment))

            pr_index(rst_header(segment, level=2))

            # Cross-reference each endpoint from the index to its segment page.
            for uri in segment_uris:
                methods = swagger['paths'][uri]
                for method, op_data in method_ordered_items(methods):
                    summary = ''
                    if 'summary' in op_data:
                        summary = " --- {}".format(op_data['summary'])
                    pr_index(":ref:`{} {}<gen_{}>`{}\n".format(method.upper(), uri, outfile, summary))
|
||||
|
||||
|
||||
def main(args):
    """Command-line entry point.

    Args:
        args: [swagger_file, output_dir]
    """
    swagger_file = args[0]
    output_dir = args[1]
    convert_swagger_to_sphinx(swagger_file=swagger_file, output_dir=output_dir)


if __name__ == '__main__':
    main(sys.argv[1:])
|
||||
@@ -130,7 +130,7 @@ paths:
|
||||
type: string
|
||||
- name: fields
|
||||
in: query
|
||||
description: "The fields to return: display_name, path.\n"
|
||||
description: 'The fields to return: display_name, path.'
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
@@ -156,8 +156,7 @@ paths:
|
||||
get:
|
||||
operationId: bookmarks_v1_bookmarks_read
|
||||
summary: Get a specific bookmark for a user.
|
||||
description: "# Example Requests\n\nGET /api/bookmarks/v1/bookmarks/{username},{usage_id}/?fields=display_name,path\n\
|
||||
\n"
|
||||
description: "# Example Requests\n\nGET /api/bookmarks/v1/bookmarks/{username},{usage_id}?fields=display_name,path"
|
||||
parameters: []
|
||||
responses:
|
||||
'200':
|
||||
|
||||
@@ -20,4 +20,4 @@ pyinotify # More efficient checking for runserver relo
|
||||
sphinx==1.8.5 # Pinned because 2.0.0 release requires Python '>=3.5' but current Python is 2.7.12
|
||||
vulture # Detects possible dead/unused code, used in scripts/find-dead-code.sh
|
||||
modernize # Used to make Python 2 code more modern with the intention of eventually porting it over to Python 3.
|
||||
recommonmark # To use markdown in sphinx
|
||||
sphinxcontrib-openapi[markdown]
|
||||
|
||||
@@ -56,7 +56,6 @@ click-log==0.3.2
|
||||
click==7.0
|
||||
code-annotations==0.3.2
|
||||
colorama==0.4.1
|
||||
commonmark==0.9.1 # via recommonmark
|
||||
configparser==4.0.2
|
||||
contextlib2==0.6.0.post1
|
||||
cookies==2.2.1
|
||||
@@ -280,7 +279,6 @@ pyyaml==5.1.2
|
||||
radon==4.0.0
|
||||
random2==1.0.1
|
||||
recommender-xblock==1.4.5
|
||||
recommonmark==0.6.0
|
||||
redis==2.10.6
|
||||
requests-oauthlib==1.1.0
|
||||
requests==2.22.0
|
||||
@@ -309,6 +307,7 @@ sorl-thumbnail==12.3
|
||||
sortedcontainers==2.1.0
|
||||
soupsieve==1.9.4
|
||||
sphinx==1.8.5
|
||||
sphinxcontrib-openapi[markdown]==0.5.0
|
||||
sphinxcontrib-websupport==1.1.2 # via sphinx
|
||||
sqlparse==0.3.0
|
||||
staff-graded-xblock==0.5
|
||||
|
||||
@@ -21,6 +21,7 @@ SKIP_DIRS = (
|
||||
'.pycharm_helpers',
|
||||
'common/static/xmodule/modules',
|
||||
'common/static/bundles',
|
||||
'docs',
|
||||
'perf_tests',
|
||||
'node_modules',
|
||||
'reports/diff_quality',
|
||||
|
||||
Reference in New Issue
Block a user