Move templating into scripts dir
There's no real need for this to be at the top level.
This commit is contained in:
parent
d9285cf5b5
commit
a38d4fc68e
18 changed files with 17 additions and 18 deletions
|
@ -52,7 +52,7 @@ func main() {
|
|||
|
||||
walker := makeWalker(dir, w)
|
||||
paths := []string{"api", "changelogs", "event-schemas", "scripts",
|
||||
"specification", "templating"}
|
||||
"specification"}
|
||||
|
||||
for _, p := range paths {
|
||||
filepath.Walk(path.Join(dir, p), walker)
|
||||
|
|
|
@ -30,7 +30,7 @@ import yaml
|
|||
|
||||
|
||||
scripts_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
templating_dir = os.path.join(os.path.dirname(scripts_dir), "templating")
|
||||
templating_dir = os.path.join(scripts_dir, "templating")
|
||||
api_dir = os.path.join(os.path.dirname(scripts_dir), "api")
|
||||
|
||||
sys.path.insert(0, templating_dir)
|
||||
|
|
|
@ -273,7 +273,7 @@ def addAnchors(path):
|
|||
|
||||
def run_through_template(input_files, set_verbose, substitutions):
|
||||
args = [
|
||||
'python', 'build.py',
|
||||
'python', script_dir+'/templating/build.py',
|
||||
"-o", tmp_dir,
|
||||
"-i", "matrix_templates",
|
||||
]
|
||||
|
@ -288,10 +288,7 @@ def run_through_template(input_files, set_verbose, substitutions):
|
|||
|
||||
log("EXEC: %s" % " ".join(args))
|
||||
log(" ==== build.py output ==== ")
|
||||
subprocess.check_call(
|
||||
args,
|
||||
cwd=os.path.join(docs_dir, "templating"),
|
||||
)
|
||||
subprocess.check_call(args)
|
||||
|
||||
"""
|
||||
Extract and resolve groups for the given target in the given targets listing.
|
||||
|
|
88
scripts/templating/README.md
Normal file
88
scripts/templating/README.md
Normal file
|
@ -0,0 +1,88 @@
|
|||
This folder contains the templates and a home-brewed templating system called
|
||||
Batesian for creating the spec. Batesian uses the templating system Jinja2 in
|
||||
Python.
|
||||
|
||||
Installation
|
||||
------------
|
||||
```
|
||||
$ pip install Jinja2
|
||||
```
|
||||
|
||||
Running
|
||||
-------
|
||||
To pass arbitrary files (not limited to RST) through the templating system:
|
||||
```
|
||||
$ python build.py -i matrix_templates /random/file/path/here.rst
|
||||
```
|
||||
|
||||
The template output can be found at ``out/here.rst``. For a full list of
|
||||
options, type ``python build.py --help``.
|
||||
|
||||
Developing
|
||||
----------
|
||||
|
||||
### Sections and Units
|
||||
Batesian is built around the concept of Sections and Units. Sections are strings
|
||||
which will be inserted into the provided document. Every section has a unique
|
||||
key name which is the template variable that it represents. Units are arbitrary
|
||||
python data. They are also represented by unique key names.
|
||||
|
||||
### Adding template variables
|
||||
If you want to add a new template variable e.g. `{{foo_bar}}` which is replaced
|
||||
with the text `foobar`, you need to add a new Section:
|
||||
|
||||
- Open `matrix_templates/sections.py`.
|
||||
- Add a new function to `MatrixSections` called `render_foo_bar`. The function
|
||||
name after `render_` determines the template variable name, and the return
|
||||
value of this function determines what will be inserted.
|
||||
|
||||
```python
|
||||
def render_foo_bar(self):
|
||||
return "foobar"
|
||||
```
|
||||
- Run `build.py` with a file which has `{{foo_bar}}` in it, and it will be
|
||||
replaced with `foobar`.
|
||||
|
||||
### Adding data for template variables
|
||||
If you want to expose arbitrary data which can be used by `MatrixSections`, you
|
||||
need to add a new Unit:
|
||||
|
||||
- Open `matrix_templates/units.py`.
|
||||
- Add a new function to `MatrixUnits` called `load_some_data`. Similar to
|
||||
sections, the function name after `load_` determines the unit name, and the
|
||||
return value of this function determines the value of the unit.
|
||||
|
||||
```python
|
||||
def load_some_data(self):
|
||||
return {
|
||||
"data": "this could be JSON from file from json.loads()",
|
||||
"processed_data": "this data may have helper keys added",
|
||||
"types": "it doesn't even need to be a dict. Whatever you want!"
|
||||
}
|
||||
```
|
||||
- In `MatrixSections`, you can now call `self.units.get("some_data")` to
|
||||
retrieve the value you returned.
|
||||
|
||||
### Using Jinja templates
|
||||
Sections can use Jinja templates to return text. Batesian will attempt to load
|
||||
all templates from `matrix_templates/templates/`. These can be accessed in
|
||||
Section code via `template = self.env.get_template("name_of_template.tmpl")`. At
|
||||
this point, the `template` is just a standard `jinja2.Template`. In fact,
|
||||
`self.env` is just a `jinja2.Environment`.
|
||||
|
||||
### Debugging
|
||||
If you don't know why your template isn't behaving as you'd expect, or you just
|
||||
want to add some informative logging, use `self.log` in either the Sections
|
||||
class or Units class. You'll need to add `-v` to `build.py` for these lines to
|
||||
show.
|
||||
|
||||
About
|
||||
-----
|
||||
|
||||
Batesian was designed to be extremely simple and just use Python as its language
|
||||
rather than another intermediary language like some other templating systems.
|
||||
This provides a **lot** of flexibility since you aren't contrained by a
|
||||
templating language. Batesian provides a thin abstraction over Jinja which is
|
||||
very useful when you want to do random bits of processing then dump the output
|
||||
into a Jinja template. Its name is derived from Batesian mimicry due to how the
|
||||
templating system uses Python as its language, but in a harmless way.
|
39
scripts/templating/batesian/__init__.py
Normal file
39
scripts/templating/batesian/__init__.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from sets import Set
|
||||
|
||||
|
||||
class AccessKeyStore(object):
|
||||
"""Storage for arbitrary data. Monitors get calls so we know if they
|
||||
were used or not."""
|
||||
|
||||
def __init__(self, existing_data=None):
|
||||
if not existing_data:
|
||||
existing_data = {}
|
||||
self.data = existing_data
|
||||
self.accessed_set = Set()
|
||||
|
||||
def keys(self):
|
||||
return self.data.keys()
|
||||
|
||||
def add(self, key, unit_dict):
|
||||
self.data[key] = unit_dict
|
||||
|
||||
def get(self, key):
|
||||
self.accessed_set.add(key)
|
||||
return self.data[key]
|
||||
|
||||
def get_unaccessed_set(self):
|
||||
data_list = Set(self.data.keys())
|
||||
return data_list - self.accessed_set
|
77
scripts/templating/batesian/sections.py
Normal file
77
scripts/templating/batesian/sections.py
Normal file
|
@ -0,0 +1,77 @@
|
|||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Parent class for writing sections."""
|
||||
import inspect
|
||||
import os
|
||||
|
||||
|
||||
class Sections(object):
|
||||
"""A class which creates sections for each method starting with "render_".
|
||||
The key for the section is the text after "render_"
|
||||
e.g. "render_room_events" has the section key "room_events"
|
||||
"""
|
||||
|
||||
def __init__(self, env, units, debug=False):
|
||||
self.env = env
|
||||
self.units = units
|
||||
self.debug = debug
|
||||
|
||||
def log(self, text):
|
||||
if self.debug:
|
||||
print "batesian:sections: %s" % text
|
||||
|
||||
def get_sections(self):
|
||||
render_list = inspect.getmembers(self, predicate=inspect.ismethod)
|
||||
section_dict = {}
|
||||
for (func_name, func) in render_list:
|
||||
if not func_name.startswith("render_"):
|
||||
continue
|
||||
section_key = func_name[len("render_"):]
|
||||
self.log("Generating section '%s'" % section_key)
|
||||
section = func()
|
||||
if isinstance(section, basestring):
|
||||
if section_key in section_dict:
|
||||
raise Exception(
|
||||
("%s : Section %s already exists. It must have been " +
|
||||
"generated dynamically. Check which render_ methods " +
|
||||
"return a dict.") %
|
||||
(func_name, section_key)
|
||||
)
|
||||
section_dict[section_key] = section
|
||||
self.log(
|
||||
" Generated. Snippet => %s" % section[:60].replace("\n","")
|
||||
)
|
||||
elif isinstance(section, dict):
|
||||
self.log(" Generated multiple sections:")
|
||||
for (k, v) in section.iteritems():
|
||||
if not isinstance(k, basestring) or not isinstance(v, basestring):
|
||||
raise Exception(
|
||||
("Method %s returned multiple sections as a dict but " +
|
||||
"expected the dict elements to be strings but they aren't.") %
|
||||
(func_name, )
|
||||
)
|
||||
if k in section_dict:
|
||||
raise Exception(
|
||||
"%s tried to produce section %s which already exists." %
|
||||
(func_name, k)
|
||||
)
|
||||
section_dict[k] = v
|
||||
self.log(
|
||||
" %s => %s" % (k, v[:60].replace("\n",""))
|
||||
)
|
||||
else:
|
||||
raise Exception(
|
||||
"Section function '%s' didn't return a string/dict!" % func_name
|
||||
)
|
||||
return section_dict
|
59
scripts/templating/batesian/units.py
Normal file
59
scripts/templating/batesian/units.py
Normal file
|
@ -0,0 +1,59 @@
|
|||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Parent class for writing units."""
|
||||
import inspect
|
||||
|
||||
|
||||
class Units(object):
|
||||
|
||||
@staticmethod
|
||||
def prop(obj, path):
|
||||
# Helper method to extract nested property values
|
||||
nested_keys = path.split("/")
|
||||
val = obj
|
||||
for key in nested_keys:
|
||||
val = val.get(key, {})
|
||||
return val
|
||||
|
||||
|
||||
def __init__(self, debug=False, substitutions=None):
|
||||
self.debug = debug
|
||||
|
||||
if substitutions is None:
|
||||
self.substitutions = {}
|
||||
else:
|
||||
self.substitutions = substitutions
|
||||
|
||||
def log(self, text):
|
||||
if self.debug:
|
||||
func_name = ""
|
||||
trace = inspect.stack()
|
||||
if len(trace) > 1 and len(trace[1]) > 2:
|
||||
func_name = trace[1][3] + ":"
|
||||
print "batesian:units:%s %s" % (func_name, text)
|
||||
|
||||
def get_units(self, debug=False):
|
||||
unit_list = inspect.getmembers(self, predicate=inspect.ismethod)
|
||||
unit_dict = {}
|
||||
for (func_name, func) in unit_list:
|
||||
if not func_name.startswith("load_"):
|
||||
continue
|
||||
unit_key = func_name[len("load_"):]
|
||||
if len(inspect.getargs(func.func_code).args) > 1:
|
||||
unit_dict[unit_key] = func(self.substitutions)
|
||||
else:
|
||||
unit_dict[unit_key] = func()
|
||||
self.log("Generated unit '%s'" % unit_key)
|
||||
|
||||
return unit_dict
|
285
scripts/templating/build.py
Executable file
285
scripts/templating/build.py
Executable file
|
@ -0,0 +1,285 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Batesian: A simple templating system using Jinja.
|
||||
|
||||
Architecture
|
||||
============
|
||||
|
||||
INPUT FILE --------+
|
||||
+-------+ +----------+ |
|
||||
| units |-+ | sections |-+ V
|
||||
+-------+ |-+ == used to create ==> +----------- | == provides vars to ==> Jinja
|
||||
+-------+ | +----------+ |
|
||||
+--------+ V
|
||||
RAW DATA (e.g. json) Blobs of text OUTPUT FILE
|
||||
|
||||
Units
|
||||
=====
|
||||
Units are random bits of unprocessed data, e.g. schema JSON files. Anything can
|
||||
be done to them, from processing it with Jinja to arbitrary python processing.
|
||||
They are typically dicts.
|
||||
|
||||
Sections
|
||||
========
|
||||
Sections are strings, typically short segments of RST. They will be dropped in
|
||||
to the provided input file based on their section key name (template var)
|
||||
They typically use a combination of templates + units to construct bits of RST.
|
||||
|
||||
Input File
|
||||
==========
|
||||
The input file is a text file which is passed through Jinja along with the
|
||||
section keys as template variables.
|
||||
|
||||
Processing
|
||||
==========
|
||||
- Execute all unit functions to load units into memory and process them.
|
||||
- Execute all section functions (which can now be done because the units exist)
|
||||
- Process the input file through Jinja, giving it the sections as template vars.
|
||||
"""
|
||||
from batesian import AccessKeyStore
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader, StrictUndefined, Template, meta
|
||||
from argparse import ArgumentParser, FileType
|
||||
import importlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from textwrap import TextWrapper
|
||||
|
||||
from matrix_templates.units import TypeTableRow
|
||||
|
||||
|
||||
def create_from_template(template, sections):
|
||||
return template.render(sections)
|
||||
|
||||
def check_unaccessed(name, store):
|
||||
unaccessed_keys = store.get_unaccessed_set()
|
||||
if len(unaccessed_keys) > 0:
|
||||
log("Found %s unused %s keys." % (len(unaccessed_keys), name))
|
||||
log(unaccessed_keys)
|
||||
|
||||
def main(input_module, files=None, out_dir=None, verbose=False, substitutions={}):
|
||||
if out_dir and not os.path.exists(out_dir):
|
||||
os.makedirs(out_dir)
|
||||
|
||||
in_mod = importlib.import_module(input_module)
|
||||
|
||||
# add a template filter to produce pretty pretty JSON
|
||||
def jsonify(input, indent=None, pre_whitespace=0):
|
||||
code = json.dumps(input, indent=indent, sort_keys=True)
|
||||
if pre_whitespace:
|
||||
code = code.replace("\n", ("\n" +" "*pre_whitespace))
|
||||
|
||||
return code
|
||||
|
||||
def indent_block(input, indent):
|
||||
return input.replace("\n", ("\n" + " "*indent))
|
||||
|
||||
def indent(input, indent):
|
||||
return " "*indent + input
|
||||
|
||||
def wrap(input, wrap=80, initial_indent=""):
|
||||
if len(input) == 0:
|
||||
return initial_indent
|
||||
# TextWrapper collapses newlines into single spaces; we do our own
|
||||
# splitting on newlines to prevent this, so that newlines can actually
|
||||
# be intentionally inserted in text.
|
||||
input_lines = input.split('\n\n')
|
||||
wrapper = TextWrapper(initial_indent=initial_indent, width=wrap)
|
||||
output_lines = [wrapper.fill(line) for line in input_lines]
|
||||
|
||||
for i in range(len(output_lines)):
|
||||
line = output_lines[i]
|
||||
in_bullet = line.startswith("- ")
|
||||
if in_bullet:
|
||||
output_lines[i] = line.replace("\n", "\n " + initial_indent)
|
||||
|
||||
return '\n\n'.join(output_lines)
|
||||
|
||||
def fieldwidths(input, keys, defaults=[], default_width=15):
|
||||
"""
|
||||
A template filter to help in the generation of tables.
|
||||
|
||||
Given a list of rows, returns a list giving the maximum length of the
|
||||
values in each column.
|
||||
|
||||
:param list[TypeTableRow|dict[str,str]] input:
|
||||
a list of rows
|
||||
:param list[str] keys: the keys corresponding to the table columns
|
||||
:param list[int] defaults: for each column, the default column width.
|
||||
:param int default_width: if ``defaults`` is shorter than ``keys``, this
|
||||
will be used as a fallback
|
||||
"""
|
||||
def getrowattribute(row, k):
|
||||
# the row may be a dict (particularly the title row, which is
|
||||
# generated by the template
|
||||
if not isinstance(row, TypeTableRow):
|
||||
return row[k]
|
||||
return getattr(row, k)
|
||||
|
||||
def colwidth(key, default):
|
||||
rowwidths = (len(getrowattribute(row, key)) for row in input)
|
||||
return reduce(max, rowwidths,
|
||||
default if default is not None else default_width)
|
||||
|
||||
results = map(colwidth, keys, defaults)
|
||||
return results
|
||||
|
||||
# make Jinja aware of the templates and filters
|
||||
env = Environment(
|
||||
loader=FileSystemLoader(in_mod.exports["templates"]),
|
||||
undefined=StrictUndefined
|
||||
)
|
||||
env.filters["jsonify"] = jsonify
|
||||
env.filters["indent"] = indent
|
||||
env.filters["indent_block"] = indent_block
|
||||
env.filters["wrap"] = wrap
|
||||
env.filters["fieldwidths"] = fieldwidths
|
||||
|
||||
# load up and parse the lowest single units possible: we don't know or care
|
||||
# which spec section will use it, we just need it there in memory for when
|
||||
# they want it.
|
||||
units = AccessKeyStore(
|
||||
existing_data=in_mod.exports["units"](
|
||||
debug=verbose,
|
||||
substitutions=substitutions,
|
||||
).get_units()
|
||||
)
|
||||
|
||||
# use the units to create RST sections
|
||||
sections = in_mod.exports["sections"](env, units, debug=verbose).get_sections()
|
||||
|
||||
# print out valid section keys if no file supplied
|
||||
if not files:
|
||||
print "\nValid template variables:"
|
||||
for key in sections.keys():
|
||||
sec_text = "" if (len(sections[key]) > 75) else (
|
||||
"(Value: '%s')" % sections[key]
|
||||
)
|
||||
sec_info = "%s characters" % len(sections[key])
|
||||
if sections[key].count("\n") > 0:
|
||||
sec_info += ", %s lines" % sections[key].count("\n")
|
||||
print " %s" % key
|
||||
print " %s %s" % (sec_info, sec_text)
|
||||
return
|
||||
|
||||
# check the input files and substitute in sections where required
|
||||
for input_filename in files:
|
||||
output_filename = os.path.join(out_dir,
|
||||
os.path.basename(input_filename))
|
||||
process_file(env, sections, input_filename, output_filename)
|
||||
|
||||
check_unaccessed("units", units)
|
||||
|
||||
def process_file(env, sections, filename, output_filename):
|
||||
log("Parsing input template: %s" % filename)
|
||||
|
||||
with open(filename, "r") as file_stream:
|
||||
temp_str = file_stream.read().decode("utf-8")
|
||||
|
||||
# do sanity checking on the template to make sure they aren't reffing things
|
||||
# which will never be replaced with a section.
|
||||
ast = env.parse(temp_str)
|
||||
template_vars = meta.find_undeclared_variables(ast)
|
||||
unused_vars = [var for var in template_vars if var not in sections]
|
||||
if len(unused_vars) > 0:
|
||||
raise Exception(
|
||||
"You have {{ variables }} which are not found in sections: %s" %
|
||||
(unused_vars,)
|
||||
)
|
||||
# process the template
|
||||
temp = Template(temp_str)
|
||||
output = create_from_template(temp, sections)
|
||||
|
||||
# Do these substitutions outside of the ordinary templating system because
|
||||
# we want them to apply to things like the underlying swagger used to
|
||||
# generate the templates, not just the top-level sections.
|
||||
for old, new in substitutions.items():
|
||||
output = output.replace(old, new)
|
||||
|
||||
with open(output_filename, "w") as f:
|
||||
f.write(output.encode("utf-8"))
|
||||
log("Output file for: %s" % output_filename)
|
||||
|
||||
|
||||
def log(line):
|
||||
print "batesian: %s" % line
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = ArgumentParser(
|
||||
"Processes a file (typically .rst) through Jinja to replace templated "+
|
||||
"areas with section information from the provided input module. For a "+
|
||||
"list of possible template variables, add --show-template-vars."
|
||||
)
|
||||
parser.add_argument(
|
||||
"files", nargs="+",
|
||||
help="The input files to process. These will be passed through Jinja "+
|
||||
"then output under the same name to the output directory."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--input", "-i",
|
||||
help="The python module (not file) which contains the sections/units "+
|
||||
"classes. This module must have an 'exports' dict which has "+
|
||||
"{ 'units': UnitClass, 'sections': SectionClass, "+
|
||||
"'templates': 'template/dir' }"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--out-directory", "-o", help="The directory to output the file to."+
|
||||
" Default: /out",
|
||||
default="out"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--show-template-vars", "-s", action="store_true",
|
||||
help="Show a list of all possible variables (sections) you can use in"+
|
||||
" the input file."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose", "-v", action="store_true",
|
||||
help="Turn on verbose mode."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--substitution", action="append",
|
||||
help="Substitutions to apply to the generated output, of form NEEDLE=REPLACEMENT.",
|
||||
default=[],
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.verbose:
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
else:
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
if not args.input:
|
||||
raise Exception("Missing [i]nput python module.")
|
||||
|
||||
if (args.show_template_vars):
|
||||
main(args.input, verbose=args.verbose)
|
||||
sys.exit(0)
|
||||
|
||||
substitutions = {}
|
||||
for substitution in args.substitution:
|
||||
parts = substitution.split("=", 1)
|
||||
if len(parts) != 2:
|
||||
raise Exception("Invalid substitution")
|
||||
substitutions[parts[0]] = parts[1]
|
||||
|
||||
main(
|
||||
args.input, files=args.files, out_dir=args.out_directory,
|
||||
substitutions=substitutions, verbose=args.verbose
|
||||
)
|
22
scripts/templating/matrix_templates/__init__.py
Normal file
22
scripts/templating/matrix_templates/__init__.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from sections import MatrixSections
|
||||
from units import MatrixUnits
|
||||
import os
|
||||
|
||||
exports = {
|
||||
"units": MatrixUnits,
|
||||
"sections": MatrixSections,
|
||||
"templates": os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
|
||||
}
|
191
scripts/templating/matrix_templates/sections.py
Normal file
191
scripts/templating/matrix_templates/sections.py
Normal file
|
@ -0,0 +1,191 @@
|
|||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Contains all the sections for the spec."""
|
||||
from batesian import AccessKeyStore
|
||||
from batesian.sections import Sections
|
||||
import inspect
|
||||
import json
|
||||
import os
|
||||
|
||||
|
||||
class MatrixSections(Sections):
|
||||
|
||||
# pass through git ver so it'll be dropped in the input file
|
||||
def render_git_version(self):
|
||||
return self.units.get("git_version")["string"]
|
||||
|
||||
def render_git_rev(self):
|
||||
return self.units.get("git_version")["revision"]
|
||||
|
||||
def render_client_server_changelog(self):
|
||||
changelogs = self.units.get("changelogs")
|
||||
return changelogs["client_server"]
|
||||
|
||||
def _render_events(self, filterFn, sortFn):
|
||||
template = self.env.get_template("events.tmpl")
|
||||
examples = self.units.get("event_examples")
|
||||
schemas = self.units.get("event_schemas")
|
||||
subtitle_title_char = self.units.get("spec_targets")[
|
||||
"relative_title_styles"
|
||||
]["subtitle"]
|
||||
sections = []
|
||||
for event_name in sortFn(schemas):
|
||||
if not filterFn(event_name):
|
||||
continue
|
||||
sections.append(template.render(
|
||||
examples=examples[event_name],
|
||||
event=schemas[event_name],
|
||||
title_kind=subtitle_title_char
|
||||
))
|
||||
return "\n\n".join(sections)
|
||||
|
||||
def _render_http_api_group(self, group, sortPathList=None):
|
||||
template = self.env.get_template("http-api.tmpl")
|
||||
http_api = self.units.get("swagger_apis")[group]["__meta"]
|
||||
subtitle_title_char = self.units.get("spec_targets")[
|
||||
"relative_title_styles"
|
||||
]["subtitle"]
|
||||
sections = []
|
||||
endpoints = []
|
||||
if sortPathList:
|
||||
# list of substrings to sort by
|
||||
sorted_endpoints = []
|
||||
for path_substr in sortPathList:
|
||||
for e in http_api["endpoints"]:
|
||||
if path_substr in e["path"]:
|
||||
sorted_endpoints.append(e) # could have multiple
|
||||
# dump rest
|
||||
rest = [
|
||||
e for e in http_api["endpoints"] if e not in sorted_endpoints
|
||||
]
|
||||
endpoints = sorted_endpoints + rest
|
||||
else:
|
||||
# sort alphabetically based on path
|
||||
endpoints = http_api["endpoints"]
|
||||
|
||||
for endpoint in endpoints:
|
||||
sections.append(template.render(
|
||||
endpoint=endpoint,
|
||||
title_kind=subtitle_title_char
|
||||
))
|
||||
return "\n\n".join(sections)
|
||||
|
||||
|
||||
# Special function: Returning a dict will specify multiple sections where
|
||||
# the key is the section name and the value is the value of the section
|
||||
def render_group_http_apis(self):
|
||||
# map all swagger_apis to the form $GROUP_http_api
|
||||
swagger_groups = self.units.get("swagger_apis").keys()
|
||||
renders = {}
|
||||
for group in swagger_groups:
|
||||
sortFnOrPathList = None
|
||||
if group == "presence_cs":
|
||||
sortFnOrPathList = ["status"]
|
||||
elif group == "profile_cs":
|
||||
sortFnOrPathList=["displayname", "avatar_url"]
|
||||
renders[group + "_http_api"] = self._render_http_api_group(
|
||||
group, sortFnOrPathList
|
||||
)
|
||||
return renders
|
||||
|
||||
# Special function: Returning a dict will specify multiple sections where
|
||||
# the key is the section name and the value is the value of the section
|
||||
def render_group_events(self):
|
||||
# map all event schemata to the form $EVENTTYPE_event with s/./_/g
|
||||
# e.g. m_room_topic_event
|
||||
schemas = self.units.get("event_schemas")
|
||||
renders = {}
|
||||
for event_type in schemas:
|
||||
renders[event_type.replace(".", "_") + "_event"] = self._render_events(
|
||||
lambda x: x == event_type, sorted
|
||||
)
|
||||
return renders
|
||||
|
||||
def render_room_events(self):
|
||||
def filterFn(eventType):
|
||||
return (
|
||||
eventType.startswith("m.room") and
|
||||
not eventType.startswith("m.room.message#m.")
|
||||
)
|
||||
return self._render_events(filterFn, sorted)
|
||||
|
||||
def render_msgtype_events(self):
|
||||
template = self.env.get_template("msgtypes.tmpl")
|
||||
examples = self.units.get("event_examples")
|
||||
schemas = self.units.get("event_schemas")
|
||||
subtitle_title_char = self.units.get("spec_targets")[
|
||||
"relative_title_styles"
|
||||
]["subtitle"]
|
||||
sections = []
|
||||
msgtype_order = [
|
||||
"m.room.message#m.text", "m.room.message#m.emote",
|
||||
"m.room.message#m.notice", "m.room.message#m.image",
|
||||
"m.room.message#m.file"
|
||||
]
|
||||
other_msgtypes = [
|
||||
k for k in schemas.keys() if k.startswith("m.room.message#") and
|
||||
k not in msgtype_order
|
||||
]
|
||||
for event_name in (msgtype_order + other_msgtypes):
|
||||
if not event_name.startswith("m.room.message#m."):
|
||||
continue
|
||||
sections.append(template.render(
|
||||
example=examples[event_name][0],
|
||||
event=schemas[event_name],
|
||||
title_kind=subtitle_title_char
|
||||
))
|
||||
return "\n\n".join(sections)
|
||||
|
||||
def render_voip_events(self):
|
||||
def filterFn(eventType):
|
||||
return eventType.startswith("m.call")
|
||||
def sortFn(eventTypes):
|
||||
ordering = [
|
||||
"m.call.invite", "m.call.candidates", "m.call.answer",
|
||||
"m.call.hangup"
|
||||
]
|
||||
rest = [
|
||||
k for k in eventTypes if k not in ordering
|
||||
]
|
||||
return ordering + rest
|
||||
return self._render_events(filterFn, sortFn)
|
||||
|
||||
def render_presence_events(self):
|
||||
def filterFn(eventType):
|
||||
return eventType.startswith("m.presence")
|
||||
return self._render_events(filterFn, sorted)
|
||||
|
||||
def _render_ce_type(self, type):
|
||||
template = self.env.get_template("common-event-fields.tmpl")
|
||||
ce_types = self.units.get("common_event_fields")
|
||||
subtitle_title_char = self.units.get("spec_targets")[
|
||||
"relative_title_styles"
|
||||
]["subtitle"]
|
||||
return template.render(
|
||||
common_event=ce_types[type], title_kind=subtitle_title_char
|
||||
)
|
||||
|
||||
def render_common_event_fields(self):
|
||||
return self._render_ce_type("event")
|
||||
|
||||
def render_common_room_event_fields(self):
|
||||
return self._render_ce_type("room_event")
|
||||
|
||||
def render_common_state_event_fields(self):
|
||||
return self._render_ce_type("state_event")
|
||||
|
||||
def render_apis(self):
|
||||
template = self.env.get_template("apis.tmpl")
|
||||
apis = self.units.get("apis")
|
||||
return template.render(apis=apis)
|
4
scripts/templating/matrix_templates/templates/apis.tmpl
Normal file
4
scripts/templating/matrix_templates/templates/apis.tmpl
Normal file
|
@ -0,0 +1,4 @@
|
|||
{% import 'tables.tmpl' as tables -%}
|
||||
|
||||
{{ tables.paramtable(apis.rows, ["API", "Version", "Description"]) }}
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
{% import 'tables.tmpl' as tables -%}
|
||||
|
||||
{{common_event.title}} Fields
|
||||
{{(7 + common_event.title | length) * title_kind}}
|
||||
|
||||
{{common_event.desc}}
|
||||
|
||||
{% for table in common_event.tables %}
|
||||
{{"``"+table.title+"``" if table.title else "" }}
|
||||
|
||||
{{ tables.paramtable(table.rows, ["Key", "Type", "Description"]) }}
|
||||
|
||||
{% endfor %}
|
26
scripts/templating/matrix_templates/templates/events.tmpl
Normal file
26
scripts/templating/matrix_templates/templates/events.tmpl
Normal file
|
@ -0,0 +1,26 @@
|
|||
{% import 'tables.tmpl' as tables -%}
|
||||
|
||||
``{{event.type}}``
|
||||
{{(4 + event.type | length) * title_kind}}
|
||||
|
||||
{% if (event.typeof | length) %}
|
||||
*{{event.typeof}}*
|
||||
{{event.typeof_info}}
|
||||
|
||||
{% endif -%}
|
||||
|
||||
{{event.desc}}
|
||||
|
||||
{% for table in event.content_fields %}
|
||||
{{"``"+table.title+"``" if table.title else "" }}
|
||||
|
||||
{{ tables.paramtable(table.rows, [(table.title or "Content") ~ " Key", "Type", "Description"]) }}
|
||||
|
||||
{% endfor %}
|
||||
Example{% if examples | length > 1 %}s{% endif %}:
|
||||
|
||||
{% for example in examples %}
|
||||
.. code:: json
|
||||
|
||||
{{example | jsonify(4, 4)}}
|
||||
{% endfor %}
|
74
scripts/templating/matrix_templates/templates/http-api.tmpl
Normal file
74
scripts/templating/matrix_templates/templates/http-api.tmpl
Normal file
|
@ -0,0 +1,74 @@
|
|||
{% import 'tables.tmpl' as tables -%}
|
||||
|
||||
``{{endpoint.method}} {{endpoint.path}}``
|
||||
{{(5 + (endpoint.path | length) + (endpoint.method | length)) * title_kind}}
|
||||
{% if "deprecated" in endpoint and endpoint.deprecated -%}
|
||||
.. WARNING::
|
||||
This API is deprecated and will be removed from a future release.
|
||||
|
||||
{% endif -%}
|
||||
|
||||
{{endpoint.desc}}
|
||||
|
||||
{{":Rate-limited: Yes." if endpoint.rate_limited else "" }}
|
||||
{{":Requires auth: Yes." if endpoint.requires_auth else "" }}
|
||||
|
||||
Request format:
|
||||
{% if (endpoint.req_param_by_loc | length) %}
|
||||
{{ tables.split_paramtable(endpoint.req_param_by_loc) }}
|
||||
{% if (endpoint.req_body_tables) %}
|
||||
{% for table in endpoint.req_body_tables -%}
|
||||
{{"``"+table.title+"``" if table.title else "" }}
|
||||
{{ tables.paramtable(table.rows) }}
|
||||
{% endfor -%}
|
||||
{% endif -%}
|
||||
|
||||
{% else %}
|
||||
`No parameters`
|
||||
{% endif %}
|
||||
|
||||
{% if endpoint.res_headers is not none -%}
|
||||
Response headers:
|
||||
|
||||
{{ tables.paramtable(endpoint.res_headers.rows) }}
|
||||
{% endif -%}
|
||||
|
||||
{% if endpoint.res_tables|length > 0 -%}
|
||||
Response format:
|
||||
|
||||
{% for table in endpoint.res_tables -%}
|
||||
{{"``"+table.title+"``" if table.title else "" }}
|
||||
|
||||
{{ tables.paramtable(table.rows) }}
|
||||
|
||||
{% endfor %}
|
||||
{% endif -%}
|
||||
|
||||
Example request:
|
||||
|
||||
.. code:: http
|
||||
|
||||
{{endpoint.example.req | indent_block(2)}}
|
||||
|
||||
{% if endpoint.responses|length > 0 -%}
|
||||
Response{{"s" if endpoint.responses|length > 1 else "" }}:
|
||||
|
||||
{% endif -%}
|
||||
|
||||
{% for res in endpoint.responses -%}
|
||||
|
||||
**Status code {{res["code"]}}:**
|
||||
|
||||
{{res["description"]}}
|
||||
|
||||
{% if res["example"] -%}
|
||||
|
||||
Example
|
||||
|
||||
.. code:: json
|
||||
|
||||
{{res["example"] | indent_block(2)}}
|
||||
|
||||
{% endif -%}
|
||||
|
||||
{% endfor %}
|
16
scripts/templating/matrix_templates/templates/msgtypes.tmpl
Normal file
16
scripts/templating/matrix_templates/templates/msgtypes.tmpl
Normal file
|
@ -0,0 +1,16 @@
|
|||
{% import 'tables.tmpl' as tables -%}
|
||||
|
||||
``{{event.msgtype}}``
|
||||
{{(4 + event.msgtype | length) * title_kind}}
|
||||
{{event.desc | wrap(80)}}
|
||||
{% for table in event.content_fields -%}
|
||||
{{"``"+table.title+"``" if table.title else "" }}
|
||||
|
||||
{{ tables.paramtable(table.rows, [(table.title or "Content") ~ " Key", "Type", "Description"]) }}
|
||||
|
||||
{% endfor %}
|
||||
Example:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{{example | jsonify(4, 4)}}
|
103
scripts/templating/matrix_templates/templates/tables.tmpl
Normal file
103
scripts/templating/matrix_templates/templates/tables.tmpl
Normal file
|
@ -0,0 +1,103 @@
|
|||
{#
|
||||
# A set of macros for generating RST tables
|
||||
#}
|
||||
|
||||
|
||||
{#
|
||||
# write a table for a list of parameters.
|
||||
#
|
||||
# 'rows' is the list of parameters. Each row should be a TypeTableRow.
|
||||
#}
|
||||
{% macro paramtable(rows, titles=["Parameter", "Type", "Description"]) -%}
|
||||
{{ split_paramtable({None: rows}, titles) }}
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{#
|
||||
# write a table for the request parameters, split by location.
|
||||
# 'rows_by_loc' is a map from location to a list of parameters.
|
||||
#
|
||||
# As a special case, if a key of 'rows_by_loc' is 'None', no title row is
|
||||
# written for that location. This is used by the standard 'paramtable' macro.
|
||||
#}
|
||||
{% macro split_paramtable(rows_by_loc,
|
||||
titles=["Parameter", "Type", "Description"]) -%}
|
||||
|
||||
{% set rowkeys = ['key', 'title', 'desc'] %}
|
||||
{% set titlerow = {'key': titles[0], 'title': titles[1], 'desc': titles[2]} %}
|
||||
|
||||
{# We need the rows flattened into a single list. Abuse the 'sum' filter to
|
||||
# join arrays instead of add numbers. -#}
|
||||
{% set flatrows = rows_by_loc.values()|sum(start=[]) -%}
|
||||
|
||||
{# Figure out the widths of the columns. The last column is always 50 characters
|
||||
# wide; the others default to 10, but stretch if there is wider text in the
|
||||
# column. -#}
|
||||
{% set fieldwidths = (([titlerow] + flatrows) |
|
||||
fieldwidths(rowkeys[0:-1], [10, 10])) + [50] -%}
|
||||
|
||||
{{ tableheader(fieldwidths) }}
|
||||
{{ tablerow(fieldwidths, titlerow, rowkeys) }}
|
||||
{{ tableheader(fieldwidths) }}
|
||||
{% for loc in rows_by_loc -%}
|
||||
|
||||
{% if loc != None -%}
|
||||
{{ tablespan(fieldwidths, "*" ~ loc ~ " parameters*") }}
|
||||
{% endif -%}
|
||||
|
||||
{% for row in rows_by_loc[loc] -%}
|
||||
{{ tablerow(fieldwidths, row, rowkeys) }}
|
||||
{% endfor -%}
|
||||
{% endfor -%}
|
||||
|
||||
{{ tableheader(fieldwidths) }}
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
|
||||
{#
|
||||
# Write a table header row, for the given column widths
|
||||
#}
|
||||
{% macro tableheader(widths) -%}
|
||||
{% for arg in widths -%}
|
||||
{{"="*arg}} {% endfor -%}
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
|
||||
{#
|
||||
# Write a normal table row. Each of 'widths' and 'keys' should be sequences
|
||||
# of the same length; 'widths' defines the column widths, and 'keys' the
|
||||
# attributes of 'row' to look up for values to put in the columns.
|
||||
#}
|
||||
{% macro tablerow(widths, row, keys) -%}
|
||||
{% for key in keys -%}
|
||||
{% set value=row[key] -%}
|
||||
{% if not loop.last -%}
|
||||
{# the first few columns need space after them -#}
|
||||
{{ value }}{{" "*(1+widths[loop.index0]-value|length) -}}
|
||||
{% else -%}
|
||||
{# the last column needs wrapping and indenting (by the sum of the widths of
|
||||
the preceding columns, plus the number of preceding columns (for the
|
||||
separators)) -#}
|
||||
{{ value | wrap(widths[loop.index0]) |
|
||||
indent_block(widths[0:-1]|sum + loop.index0) -}}
|
||||
{% endif -%}
|
||||
{% endfor -%}
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
|
||||
|
||||
{#
|
||||
# write a tablespan row. This is a single value which spans the entire table.
|
||||
#}
|
||||
{% macro tablespan(widths, value) -%}
|
||||
{{value}}
|
||||
{# we write a trailing space to stop the separator being misinterpreted
|
||||
# as a header line. -#}
|
||||
{{"-"*(widths|sum + widths|length -1)}} {% endmacro %}
|
||||
|
||||
|
||||
|
||||
|
899
scripts/templating/matrix_templates/units.py
Normal file
899
scripts/templating/matrix_templates/units.py
Normal file
|
@ -0,0 +1,899 @@
|
|||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""
|
||||
Contains all the units for the spec.
|
||||
|
||||
This file loads swagger and JSON schema files and parses out the useful bits
|
||||
and returns them as Units for use in Batesian.
|
||||
|
||||
For the actual conversion of data -> RST (including templates), see the sections
|
||||
file instead.
|
||||
"""
|
||||
from batesian.units import Units
|
||||
from collections import OrderedDict
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib
|
||||
import yaml
|
||||
|
||||
matrix_doc_dir=reduce(lambda acc,_: os.path.dirname(acc),
|
||||
range(1, 5), os.path.abspath(__file__))
|
||||
|
||||
HTTP_APIS = {
|
||||
os.path.join(matrix_doc_dir, "api/application-service"): "as",
|
||||
os.path.join(matrix_doc_dir, "api/client-server"): "cs",
|
||||
os.path.join(matrix_doc_dir, "api/identity"): "is",
|
||||
os.path.join(matrix_doc_dir, "api/push-gateway"): "push",
|
||||
}
|
||||
EVENT_EXAMPLES = os.path.join(matrix_doc_dir, "event-schemas/examples")
|
||||
EVENT_SCHEMA = os.path.join(matrix_doc_dir, "event-schemas/schema")
|
||||
CORE_EVENT_SCHEMA = os.path.join(matrix_doc_dir, "event-schemas/schema/core-event-schema")
|
||||
CHANGELOG_DIR = os.path.join(matrix_doc_dir, "changelogs")
|
||||
TARGETS = os.path.join(matrix_doc_dir, "specification/targets.yaml")
|
||||
|
||||
ROOM_EVENT = "core-event-schema/room_event.yaml"
|
||||
STATE_EVENT = "core-event-schema/state_event.yaml"
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# a yaml Loader which loads mappings into OrderedDicts instead of regular
|
||||
# dicts, so that we preserve the ordering of properties from the api files.
|
||||
#
|
||||
# with thanks to http://stackoverflow.com/a/21912744/637864
|
||||
class OrderedLoader(yaml.Loader):
|
||||
pass
|
||||
def construct_mapping(loader, node):
|
||||
loader.flatten_mapping(node)
|
||||
pairs = loader.construct_pairs(node)
|
||||
return OrderedDict(pairs)
|
||||
OrderedLoader.add_constructor(
|
||||
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
|
||||
construct_mapping)
|
||||
|
||||
|
||||
class TypeTable(object):
|
||||
"""Describes a table documenting an object type
|
||||
|
||||
Attributes:
|
||||
title(str|None): Title of the table - normally the object type
|
||||
desc(str|None): description of the object
|
||||
rows(list[TypeTableRow]): the rows in the table
|
||||
"""
|
||||
def __init__(self, title=None, desc=None, rows=[]):
|
||||
self.title=title
|
||||
self.desc=desc
|
||||
self._rows = []
|
||||
for row in rows:
|
||||
self.add_row(row)
|
||||
|
||||
def add_row(self, row):
|
||||
if not isinstance(row, TypeTableRow):
|
||||
raise ValueError("Can only add TypeTableRows to TypeTable")
|
||||
|
||||
self._rows.append(row)
|
||||
|
||||
def __getattr__(self, item):
|
||||
if item == 'rows':
|
||||
return list(self._rows)
|
||||
return super(TypeTable, self).__getattr__(item)
|
||||
|
||||
def __repr__(self):
|
||||
return "TypeTable[%s, rows=%s]" % (self.title, self._rows)
|
||||
|
||||
|
||||
class TypeTableRow(object):
|
||||
"""Describes an object field defined in the json schema
|
||||
"""
|
||||
def __init__(self, key, title, desc, required=False):
|
||||
self.key = key
|
||||
self.title = title
|
||||
self.desc = desc
|
||||
self.required = required
|
||||
|
||||
def __repr__(self):
|
||||
return "TypeTableRow[%s: %s]" % (self.key, self.desc)
|
||||
|
||||
|
||||
def resolve_references(path, schema):
|
||||
if isinstance(schema, dict):
|
||||
# do $ref first
|
||||
if '$ref' in schema:
|
||||
value = schema['$ref']
|
||||
path = os.path.join(os.path.dirname(path), value)
|
||||
with open(path) as f:
|
||||
ref = yaml.load(f, OrderedLoader)
|
||||
result = resolve_references(path, ref)
|
||||
del schema['$ref']
|
||||
else:
|
||||
result = OrderedDict()
|
||||
|
||||
for key, value in schema.items():
|
||||
result[key] = resolve_references(path, value)
|
||||
return result
|
||||
elif isinstance(schema, list):
|
||||
return [resolve_references(path, value) for value in schema]
|
||||
else:
|
||||
return schema
|
||||
|
||||
|
||||
def inherit_parents(obj):
|
||||
"""
|
||||
Recurse through the 'allOf' declarations in the object
|
||||
"""
|
||||
logger.debug("inherit_parents %r" % obj)
|
||||
parents = obj.get("allOf", [])
|
||||
if not parents:
|
||||
return obj
|
||||
|
||||
result = {}
|
||||
|
||||
# settings defined in the child take priority over the parents, so we
|
||||
# iterate through the parents first, and then overwrite with the settings
|
||||
# from the child.
|
||||
for p in map(inherit_parents, parents) + [obj]:
|
||||
# child blats out type, title and description
|
||||
for key in ('type', 'title', 'description'):
|
||||
if p.get(key):
|
||||
result[key] = p[key]
|
||||
|
||||
# other fields get merged
|
||||
for key in ('required', ):
|
||||
if p.get(key):
|
||||
result.setdefault(key, []).extend(p[key])
|
||||
|
||||
for key in ('properties', 'additionalProperties', 'patternProperties'):
|
||||
if p.get(key):
|
||||
result.setdefault(key, OrderedDict()).update(p[key])
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_json_schema_object_fields(obj, enforce_title=False):
|
||||
"""Parse a JSON schema object definition
|
||||
|
||||
Args:
|
||||
obj(dict): definition from the JSON schema file. $refs should already
|
||||
have been resolved.
|
||||
enforce_title (bool): if True, and the definition has no "title",
|
||||
the 'title' result will be set to 'NO_TITLE' (otherwise it will be
|
||||
set to None)
|
||||
|
||||
Returns:
|
||||
dict: with the following fields:
|
||||
- title (str): title (normally the type name) for the object
|
||||
- tables (list[TypeTable]): list of the tables for the type
|
||||
definition
|
||||
"""
|
||||
# Algorithm:
|
||||
# f.e. property => add field info (if field is object then recurse)
|
||||
if obj.get("type") != "object":
|
||||
raise Exception(
|
||||
"get_json_schema_object_fields: Object %s isn't an object." % obj
|
||||
)
|
||||
|
||||
obj_title = obj.get("title")
|
||||
|
||||
logger.debug("Processing object with title '%s'", obj_title)
|
||||
|
||||
additionalProps = obj.get("additionalProperties")
|
||||
props = obj.get("properties")
|
||||
if additionalProps and not props:
|
||||
# not "really" an object, just a KV store
|
||||
logger.debug("%s is a pseudo-object", obj_title)
|
||||
|
||||
key_type = additionalProps.get("x-pattern", "string")
|
||||
res = process_data_type(additionalProps)
|
||||
return {
|
||||
"title": "{%s: %s}" % (key_type, res["title"]),
|
||||
"tables": res["tables"],
|
||||
}
|
||||
|
||||
if not props:
|
||||
props = obj.get("patternProperties")
|
||||
if props:
|
||||
# try to replace horrible regex key names with pretty x-pattern ones
|
||||
for key_name in props.keys():
|
||||
pretty_key = props[key_name].get("x-pattern")
|
||||
if pretty_key:
|
||||
props[pretty_key] = props[key_name]
|
||||
del props[key_name]
|
||||
|
||||
|
||||
|
||||
# Sometimes you just want to specify that a thing is an object without
|
||||
# doing all the keys.
|
||||
if not props:
|
||||
return {
|
||||
"title": obj_title if obj_title else 'object',
|
||||
"tables": [],
|
||||
}
|
||||
|
||||
if enforce_title and not obj_title:
|
||||
# Force a default titile of "NO_TITLE" to make it obvious in the
|
||||
# specification output which parts of the schema are missing a title
|
||||
obj_title = 'NO_TITLE'
|
||||
|
||||
required_keys = set(obj.get("required", []))
|
||||
|
||||
first_table_rows = []
|
||||
tables = []
|
||||
|
||||
for key_name in props:
|
||||
try:
|
||||
logger.debug("Processing property %s.%s", obj_title, key_name)
|
||||
required = key_name in required_keys
|
||||
res = process_data_type(props[key_name], required)
|
||||
|
||||
first_table_rows.append(TypeTableRow(
|
||||
key=key_name,
|
||||
title=res["title"],
|
||||
required=required,
|
||||
desc=res["desc"],
|
||||
))
|
||||
tables.extend(res["tables"])
|
||||
logger.debug("Done property %s" % key_name)
|
||||
|
||||
except Exception, e:
|
||||
e2 = Exception("Error reading property %s.%s: %s" %
|
||||
(obj_title, key_name, str(e)))
|
||||
# throw the new exception with the old stack trace, so that
|
||||
# we don't lose information about where the error occurred.
|
||||
raise e2, None, sys.exc_info()[2]
|
||||
|
||||
tables.insert(0, TypeTable(title=obj_title, rows=first_table_rows))
|
||||
|
||||
for table in tables:
|
||||
assert isinstance(table, TypeTable)
|
||||
|
||||
return {
|
||||
"title": obj_title,
|
||||
"tables": tables,
|
||||
}
|
||||
|
||||
|
||||
# process a data type definition. returns a dictionary with the keys:
|
||||
# title: stringified type name
|
||||
# desc: description
|
||||
# enum_desc: description of permissible enum fields
|
||||
# is_object: true if the data type is an object
|
||||
# tables: list of additional table definitions
|
||||
def process_data_type(prop, required=False, enforce_title=True):
|
||||
prop = inherit_parents(prop)
|
||||
|
||||
prop_type = prop['type']
|
||||
tables = []
|
||||
enum_desc = None
|
||||
is_object = False
|
||||
|
||||
if prop_type == "object":
|
||||
res = get_json_schema_object_fields(
|
||||
prop,
|
||||
enforce_title=enforce_title,
|
||||
)
|
||||
prop_title = res["title"]
|
||||
tables = res["tables"]
|
||||
is_object = True
|
||||
|
||||
elif prop_type == "array":
|
||||
nested = process_data_type(prop["items"])
|
||||
prop_title = "[%s]" % nested["title"]
|
||||
tables = nested["tables"]
|
||||
enum_desc = nested["enum_desc"]
|
||||
|
||||
else:
|
||||
prop_title = prop_type
|
||||
|
||||
if prop.get("enum"):
|
||||
if len(prop["enum"]) > 1:
|
||||
prop_title = "enum"
|
||||
enum_desc = (
|
||||
"One of: %s" % json.dumps(prop["enum"])
|
||||
)
|
||||
else:
|
||||
enum_desc = (
|
||||
"Must be '%s'." % prop["enum"][0]
|
||||
)
|
||||
|
||||
if isinstance(prop_title, list):
|
||||
prop_title = " or ".join(prop_title)
|
||||
|
||||
rq = "**Required.**" if required else None
|
||||
desc = " ".join(x for x in [rq, prop.get("description"), enum_desc] if x)
|
||||
|
||||
for table in tables:
|
||||
assert isinstance(table, TypeTable)
|
||||
|
||||
return {
|
||||
"title": prop_title,
|
||||
"desc": desc,
|
||||
"enum_desc": enum_desc,
|
||||
"is_object": is_object,
|
||||
"tables": tables,
|
||||
}
|
||||
|
||||
def deduplicate_tables(tables):
|
||||
# the result may contain duplicates, if objects are referred to more than
|
||||
# once. Filter them out.
|
||||
#
|
||||
# Go through the tables backwards so that we end up with a breadth-first
|
||||
# rather than depth-first ordering.
|
||||
|
||||
titles = set()
|
||||
filtered = []
|
||||
for table in reversed(tables):
|
||||
if table.title in titles:
|
||||
continue
|
||||
|
||||
titles.add(table.title)
|
||||
filtered.append(table)
|
||||
filtered.reverse()
|
||||
|
||||
return filtered
|
||||
|
||||
def get_tables_for_schema(schema):
|
||||
pv = process_data_type(schema, enforce_title=False)
|
||||
return deduplicate_tables(pv["tables"])
|
||||
|
||||
def get_tables_for_response(schema):
|
||||
pv = process_data_type(schema, enforce_title=False)
|
||||
tables = deduplicate_tables(pv["tables"])
|
||||
|
||||
# make up the first table, with just the 'body' row in, unless the response
|
||||
# is an object, in which case there's little point in having one.
|
||||
if not pv["is_object"]:
|
||||
first_table_row = TypeTableRow(
|
||||
key="<body>", title=pv["title"], desc=pv["desc"],
|
||||
)
|
||||
tables.insert(0, TypeTable(None, rows=[first_table_row]))
|
||||
|
||||
logger.debug("response: %r" % tables)
|
||||
|
||||
return tables
|
||||
|
||||
def get_example_for_schema(schema):
|
||||
"""Returns a python object representing a suitable example for this object"""
|
||||
schema = inherit_parents(schema)
|
||||
if 'example' in schema:
|
||||
example = schema['example']
|
||||
return example
|
||||
|
||||
proptype = schema['type']
|
||||
|
||||
if proptype == 'object':
|
||||
if 'properties' not in schema:
|
||||
raise Exception('"object" property has neither properties nor example')
|
||||
res = OrderedDict()
|
||||
for prop_name, prop in schema['properties'].iteritems():
|
||||
logger.debug("Parsing property %r" % prop_name)
|
||||
prop_example = get_example_for_schema(prop)
|
||||
res[prop_name] = prop_example
|
||||
return res
|
||||
|
||||
if proptype == 'array':
|
||||
if 'items' not in schema:
|
||||
raise Exception('"array" property has neither items nor example')
|
||||
return [get_example_for_schema(schema['items'])]
|
||||
|
||||
if proptype == 'integer':
|
||||
return 0
|
||||
|
||||
if proptype == 'string':
|
||||
return proptype
|
||||
|
||||
raise Exception("Don't know to make an example %s" % proptype)
|
||||
|
||||
def get_example_for_param(param):
|
||||
"""Returns a stringified example for a parameter"""
|
||||
if 'x-example' in param:
|
||||
return param['x-example']
|
||||
schema = param.get('schema')
|
||||
if not schema:
|
||||
return None
|
||||
|
||||
exampleobj = None
|
||||
if 'example' in schema:
|
||||
exampleobj = schema['example']
|
||||
|
||||
if exampleobj is None:
|
||||
exampleobj = get_example_for_schema(schema)
|
||||
|
||||
return json.dumps(exampleobj, indent=2)
|
||||
|
||||
def get_example_for_response(response):
|
||||
"""Returns a stringified example for a response"""
|
||||
exampleobj = None
|
||||
if 'examples' in response:
|
||||
exampleobj = response["examples"].get("application/json")
|
||||
|
||||
if exampleobj is None:
|
||||
schema = response.get('schema')
|
||||
if schema:
|
||||
if schema['type'] == 'file':
|
||||
# no example for 'file' responses
|
||||
return None
|
||||
exampleobj = get_example_for_schema(schema)
|
||||
|
||||
if exampleobj is None:
|
||||
return None
|
||||
|
||||
return json.dumps(exampleobj, indent=2)
|
||||
|
||||
class MatrixUnits(Units):
|
||||
def _load_swagger_meta(self, api, group_name):
|
||||
endpoints = []
|
||||
base_path = api.get("basePath", "")
|
||||
|
||||
for path in api["paths"]:
|
||||
for method in api["paths"][path]:
|
||||
logger.info(" ------- Endpoint: %s %s ------- " % (method, path))
|
||||
|
||||
try:
|
||||
endpoint = self._handle_endpoint(
|
||||
api["paths"][path][method], method,
|
||||
base_path.rstrip("/") + path)
|
||||
|
||||
endpoints.append(endpoint)
|
||||
except Exception as e:
|
||||
logger.error("Error handling endpoint %s %s: %s",
|
||||
method, path, e)
|
||||
raise
|
||||
return {
|
||||
"base": api.get("basePath").rstrip("/"),
|
||||
"group": group_name,
|
||||
"endpoints": endpoints,
|
||||
}
|
||||
|
||||
def _handle_endpoint(self, endpoint_swagger, method, path):
|
||||
endpoint = {
|
||||
"title": endpoint_swagger.get("summary", ""),
|
||||
"deprecated": endpoint_swagger.get("deprecated", False),
|
||||
"desc": endpoint_swagger.get("description",
|
||||
endpoint_swagger.get("summary", "")),
|
||||
"method": method.upper(),
|
||||
"path": path.strip(),
|
||||
"requires_auth": "security" in endpoint_swagger,
|
||||
"rate_limited": 429 in endpoint_swagger.get("responses", {}),
|
||||
"req_param_by_loc": {},
|
||||
"req_body_tables": [],
|
||||
"res_headers": None,
|
||||
"res_tables": [],
|
||||
"responses": [],
|
||||
"example": {
|
||||
"req": "",
|
||||
}
|
||||
}
|
||||
path_template = path
|
||||
example_query_params = []
|
||||
example_body = ""
|
||||
for param in endpoint_swagger.get("parameters", []):
|
||||
# even body params should have names, otherwise the active docs don't work.
|
||||
param_name = param["name"]
|
||||
|
||||
try:
|
||||
param_loc = param["in"]
|
||||
|
||||
if param_loc == "body":
|
||||
self._handle_body_param(param, endpoint)
|
||||
example_body = get_example_for_param(param)
|
||||
continue
|
||||
|
||||
# description
|
||||
desc = param.get("description", "")
|
||||
if param.get("required"):
|
||||
desc = "**Required.** " + desc
|
||||
|
||||
# assign value expected for this param
|
||||
val_type = param.get("type") # integer/string
|
||||
|
||||
if param.get("enum"):
|
||||
val_type = "enum"
|
||||
desc += (
|
||||
" One of: %s" % json.dumps(param.get("enum"))
|
||||
)
|
||||
|
||||
endpoint["req_param_by_loc"].setdefault(param_loc, []).append(
|
||||
TypeTableRow(key=param_name, title=val_type, desc=desc),
|
||||
)
|
||||
|
||||
example = get_example_for_param(param)
|
||||
if example is None:
|
||||
continue
|
||||
|
||||
if param_loc == "path":
|
||||
path_template = path_template.replace(
|
||||
"{%s}" % param_name, urllib.quote(example)
|
||||
)
|
||||
elif param_loc == "query":
|
||||
if type(example) == list:
|
||||
for value in example:
|
||||
example_query_params.append((param_name, value))
|
||||
else:
|
||||
example_query_params.append((param_name, example))
|
||||
|
||||
except Exception, e:
|
||||
raise Exception("Error handling parameter %s" % param_name, e)
|
||||
# endfor[param]
|
||||
good_response = None
|
||||
for code in sorted(endpoint_swagger.get("responses", {}).keys()):
|
||||
res = endpoint_swagger["responses"][code]
|
||||
if not good_response and code == 200:
|
||||
good_response = res
|
||||
description = res.get("description", "")
|
||||
example = get_example_for_response(res)
|
||||
endpoint["responses"].append({
|
||||
"code": code,
|
||||
"description": description,
|
||||
"example": example,
|
||||
})
|
||||
|
||||
# add response params if this API has any.
|
||||
if good_response:
|
||||
if "schema" in good_response:
|
||||
endpoint["res_tables"] = get_tables_for_response(
|
||||
good_response["schema"]
|
||||
)
|
||||
if "headers" in good_response:
|
||||
headers = TypeTable()
|
||||
for (header_name, header) in good_response["headers"].iteritems():
|
||||
headers.add_row(
|
||||
TypeTableRow(key=header_name, title=header["type"],
|
||||
desc=header["description"]),
|
||||
)
|
||||
endpoint["res_headers"] = headers
|
||||
query_string = "" if len(
|
||||
example_query_params) == 0 else "?" + urllib.urlencode(
|
||||
example_query_params)
|
||||
if example_body:
|
||||
endpoint["example"][
|
||||
"req"] = "%s %s%s HTTP/1.1\nContent-Type: application/json\n\n%s" % (
|
||||
method.upper(), path_template, query_string, example_body
|
||||
)
|
||||
else:
|
||||
endpoint["example"]["req"] = "%s %s%s HTTP/1.1\n\n" % (
|
||||
method.upper(), path_template, query_string
|
||||
)
|
||||
return endpoint
|
||||
|
||||
def _handle_body_param(self, param, endpoint_data):
|
||||
"""Update endpoint_data object with the details of the body param
|
||||
:param string filepath path to the yaml
|
||||
:param dict param the parameter data from the yaml
|
||||
:param dict endpoint_data dictionary of endpoint data to be updated
|
||||
"""
|
||||
try:
|
||||
schema = inherit_parents(param["schema"])
|
||||
if schema["type"] != "object":
|
||||
logger.warn(
|
||||
"Unsupported body type %s for %s %s", schema["type"],
|
||||
endpoint_data["method"], endpoint_data["path"]
|
||||
)
|
||||
return
|
||||
|
||||
req_body_tables = get_tables_for_schema(schema)
|
||||
|
||||
if req_body_tables == []:
|
||||
# no fields defined for the body.
|
||||
return
|
||||
|
||||
# put the top-level parameters into 'req_param_by_loc', and the others
|
||||
# into 'req_body_tables'
|
||||
body_params = endpoint_data['req_param_by_loc'].setdefault("JSON body",[])
|
||||
body_params.extend(req_body_tables[0].rows)
|
||||
|
||||
body_tables = req_body_tables[1:]
|
||||
endpoint_data['req_body_tables'].extend(body_tables)
|
||||
|
||||
except Exception, e:
|
||||
e2 = Exception(
|
||||
"Error decoding body of API endpoint %s %s: %s" %
|
||||
(endpoint_data["method"], endpoint_data["path"], e)
|
||||
)
|
||||
raise e2, None, sys.exc_info()[2]
|
||||
|
||||
|
||||
def load_swagger_apis(self):
|
||||
apis = {}
|
||||
for path, suffix in HTTP_APIS.items():
|
||||
for filename in os.listdir(path):
|
||||
if not filename.endswith(".yaml"):
|
||||
continue
|
||||
filepath = os.path.join(path, filename)
|
||||
logger.info("Reading swagger API: %s" % filepath)
|
||||
with open(filepath, "r") as f:
|
||||
# strip .yaml
|
||||
group_name = filename[:-5].replace("-", "_")
|
||||
group_name = "%s_%s" % (group_name, suffix)
|
||||
api = yaml.load(f.read(), OrderedLoader)
|
||||
api = resolve_references(filepath, api)
|
||||
api["__meta"] = self._load_swagger_meta(
|
||||
api, group_name
|
||||
)
|
||||
apis[group_name] = api
|
||||
return apis
|
||||
|
||||
def load_common_event_fields(self):
|
||||
"""Parse the core event schema files
|
||||
|
||||
Returns:
|
||||
dict: with the following properties:
|
||||
"title": Event title (from the 'title' field of the schema)
|
||||
"desc": desc
|
||||
"tables": list[TypeTable]
|
||||
"""
|
||||
path = CORE_EVENT_SCHEMA
|
||||
event_types = {}
|
||||
|
||||
for filename in os.listdir(path):
|
||||
if not filename.endswith(".yaml"):
|
||||
continue
|
||||
|
||||
filepath = os.path.join(path, filename)
|
||||
|
||||
event_type = filename[:-5] # strip the ".yaml"
|
||||
logger.info("Reading event schema: %s" % filepath)
|
||||
|
||||
with open(filepath) as f:
|
||||
event_schema = yaml.load(f, OrderedLoader)
|
||||
|
||||
schema_info = process_data_type(
|
||||
event_schema,
|
||||
enforce_title=True,
|
||||
)
|
||||
event_types[event_type] = schema_info
|
||||
return event_types
|
||||
|
||||
def load_apis(self, substitutions):
|
||||
cs_ver = substitutions.get("%CLIENT_RELEASE_LABEL%", "unstable")
|
||||
fed_ver = substitutions.get("%SERVER_RELEASE_LABEL%", "unstable")
|
||||
|
||||
# we abuse the typetable to return this info to the templates
|
||||
return TypeTable(rows=[
|
||||
TypeTableRow(
|
||||
"`Client-Server API <client_server/"+cs_ver+".html>`_",
|
||||
cs_ver,
|
||||
"Interaction between clients and servers",
|
||||
), TypeTableRow(
|
||||
"`Server-Server API <server_server/"+fed_ver+".html>`_",
|
||||
fed_ver,
|
||||
"Federation between servers",
|
||||
), TypeTableRow(
|
||||
"`Application Service API <application_service/unstable.html>`_",
|
||||
"unstable",
|
||||
"Privileged server plugins",
|
||||
), TypeTableRow(
|
||||
"`Identity Service API <identity_service/unstable.html>`_",
|
||||
"unstable",
|
||||
"Mapping of third party IDs to Matrix IDs",
|
||||
), TypeTableRow(
|
||||
"`Push Gateway API <push_gateway/unstable.html>`_",
|
||||
"unstable",
|
||||
"Push notifications for Matrix events",
|
||||
),
|
||||
])
|
||||
|
||||
def load_event_examples(self):
|
||||
path = EVENT_EXAMPLES
|
||||
examples = {}
|
||||
for filename in os.listdir(path):
|
||||
if not filename.startswith("m."):
|
||||
continue
|
||||
with open(os.path.join(path, filename), "r") as f:
|
||||
event_name = filename.split("#")[0]
|
||||
example = json.loads(f.read())
|
||||
|
||||
examples[filename] = examples.get(filename, [])
|
||||
examples[filename].append(example)
|
||||
if filename != event_name:
|
||||
examples[event_name] = examples.get(event_name, [])
|
||||
examples[event_name].append(example)
|
||||
return examples
|
||||
|
||||
def load_event_schemas(self):
|
||||
path = EVENT_SCHEMA
|
||||
schemata = {}
|
||||
|
||||
for filename in os.listdir(path):
|
||||
if not filename.startswith("m."):
|
||||
continue
|
||||
filepath = os.path.join(path, filename)
|
||||
try:
|
||||
schemata[filename] = self.read_event_schema(filepath)
|
||||
except Exception, e:
|
||||
e2 = Exception("Error reading event schema "+filepath+": "+
|
||||
str(e))
|
||||
# throw the new exception with the old stack trace, so that
|
||||
# we don't lose information about where the error occurred.
|
||||
raise e2, None, sys.exc_info()[2]
|
||||
|
||||
return schemata
|
||||
|
||||
    def read_event_schema(self, filepath):
        """Read one event schema file and digest it for the templates.

        Parses the YAML/JSON-schema file at *filepath* and returns a dict
        with the keys: "typeof" (one of "Message Event", "State Event" or
        ""), "typeof_info", "type" (the event type, e.g. "m.room.member"),
        "title", "desc", "msgtype" and "content_fields" (a list of type
        tables describing the event's content).

        Raises if the schema has no ``properties/type/enum``, or if a state
        event is missing a ``state_key`` description.
        """
        logger.info("Reading %s" % filepath)

        # OrderedLoader keeps the mapping key order from the source file so
        # fields render in the order the schema author wrote them.
        with open(filepath, "r") as f:
            json_schema = yaml.load(f, OrderedLoader)

        schema = {
            # one of "Message Event" or "State Event"
            "typeof": "",
            "typeof_info": "",

            # event type, eg "m.room.member". Note *not* the type of the
            # event object (which should always be 'object').
            "type": None,
            "title": None,
            "desc": None,
            "msgtype": None,
            "content_fields": [
                # <TypeTable>
            ]
        }

        # before we resolve the references, see if the first reference is to
        # the message event or state event schemas, and add typeof info if so.
        base_defs = {
            ROOM_EVENT: "Message Event",
            STATE_EVENT: "State Event"
        }
        if type(json_schema.get("allOf")) == list:
            firstRef = json_schema["allOf"][0]["$ref"]
            if firstRef in base_defs:
                schema["typeof"] = base_defs[firstRef]

        # inline every $ref so the lookups below see a flattened schema
        json_schema = resolve_references(filepath, json_schema)

        # add type
        schema["type"] = Units.prop(
            json_schema, "properties/type/enum"
        )[0]

        # add summary and desc
        schema["title"] = json_schema.get("title")
        schema["desc"] = json_schema.get("description", "")

        # walk the object for field info
        schema["content_fields"] = get_tables_for_schema(
            Units.prop(json_schema, "properties/content")
        )

        # This is horrible because we're special casing a key on m.room.member.
        # We need to do this because we want to document a non-content object.
        if schema["type"] == "m.room.member":
            invite_room_state = get_tables_for_schema(
                json_schema["properties"]["invite_room_state"]["items"],
            )
            schema["content_fields"].extend(invite_room_state)


        # grab msgtype if it is the right kind of event
        msgtype = Units.prop(
            json_schema, "properties/content/properties/msgtype/enum"
        )
        if msgtype:
            schema["msgtype"] = msgtype[0]  # enum prop

        # link to msgtypes for m.room.message
        if schema["type"] == "m.room.message" and not msgtype:
            schema["desc"] += (
                " For more information on ``msgtypes``, see "+
                "`m.room.message msgtypes`_."
            )

        # Assign state key info if it has some
        if schema["typeof"] == "State Event":
            skey_desc = Units.prop(
                json_schema, "properties/state_key/description"
            )
            if not skey_desc:
                raise Exception("Missing description for state_key")
            schema["typeof_info"] = "``state_key``: %s" % skey_desc

        return schema
|
||||
|
||||
    def load_changelogs(self):
        """Extract the first changelog section from each RST changelog file.

        Scans CHANGELOG_DIR for ``*.rst`` files and, for each, pulls out the
        body of the first titled section (the text between the first title
        underline of at least three ``=`` characters and the next title, or
        EOF).  Each body line is indented by one space.

        Returns a dict mapping changelog name (filename without ".rst") to
        the extracted body string.
        """
        changelogs = {}

        for f in os.listdir(CHANGELOG_DIR):
            if not f.endswith(".rst"):
                continue
            path = os.path.join(CHANGELOG_DIR, f)
            name = f[:-4]  # strip the ".rst" suffix

            title_part = None
            changelog_lines = []
            # NOTE: the loop variable "f" is rebound to the file handle here;
            # that is safe because the filename was already consumed above.
            with open(path, "r") as f:
                lines = f.readlines()
                # prev_line lags one line behind so that when we hit a title
                # underline we still have the title text itself in hand.
                prev_line = None
                for line in lines:
                    if prev_line is None:
                        prev_line = line
                        continue
                    if not title_part:
                        # find the title underline (at least 3 =)
                        if re.match("^[=]{3,}$", line.strip()):
                            title_part = prev_line
                            continue
                        prev_line = line
                    else: # have title, get body (stop on next title or EOF)
                        if re.match("^[=]{3,}$", line.strip()):
                            # we added the title in the previous iteration, pop it
                            # then bail out.
                            changelog_lines.pop()
                            break
                        changelog_lines.append(" " + line)
            changelogs[name] = "".join(changelog_lines)

        return changelogs
|
||||
|
||||
|
||||
def load_spec_targets(self):
|
||||
with open(TARGETS, "r") as f:
|
||||
return yaml.load(f.read())
|
||||
|
||||
|
||||
def load_git_version(self):
|
||||
null = open(os.devnull, 'w')
|
||||
cwd = os.path.dirname(os.path.abspath(__file__))
|
||||
try:
|
||||
git_branch = subprocess.check_output(
|
||||
['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip()
|
||||
except subprocess.CalledProcessError:
|
||||
git_branch = ""
|
||||
try:
|
||||
git_tag = subprocess.check_output(
|
||||
['git', 'describe', '--exact-match'],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip()
|
||||
git_tag = "tag=" + git_tag
|
||||
except subprocess.CalledProcessError:
|
||||
git_tag = ""
|
||||
try:
|
||||
git_commit = subprocess.check_output(
|
||||
['git', 'rev-parse', '--short', 'HEAD'],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip()
|
||||
except subprocess.CalledProcessError:
|
||||
git_commit = ""
|
||||
try:
|
||||
dirty_string = "-this_is_a_dirty_checkout"
|
||||
is_dirty = subprocess.check_output(
|
||||
['git', 'describe', '--dirty=' + dirty_string, "--all"],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip().endswith(dirty_string)
|
||||
git_dirty = "dirty" if is_dirty else ""
|
||||
except subprocess.CalledProcessError:
|
||||
git_dirty = ""
|
||||
|
||||
git_version = "Unknown"
|
||||
if git_branch or git_tag or git_commit or git_dirty:
|
||||
git_version = ",".join(
|
||||
s for s in
|
||||
(git_branch, git_tag, git_commit, git_dirty,)
|
||||
if s
|
||||
).encode("ascii")
|
||||
return {
|
||||
"string": git_version,
|
||||
"revision": git_commit
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue