c336cf3cea
* works with python3 * argparse used for argument handling * script can be run from tools dir and from repo's top dir as well * support for schema 2.0 added * support for add-ons added * creates single guideline for core + add-ons on same date * added --file and --all options * keep old script in file jsonToRst_schema_1.py Task: 42762 Story: 2009028 Change-Id: I6deb70fa39e8a51fcff0372217bbfc46282d7237
300 lines
9.4 KiB
Python
Executable File
300 lines
9.4 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
#
|
|
# Copyright 2015 Alexander Hirschfeld
|
|
# Copyright 2021 Red Hat, Inc.
|
|
#
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
# not use this file except in compliance with the License. You may obtain
|
|
# a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
# License for the specific language governing permissions and limitations
|
|
# under the License.
|
|
#
|
|
"""
|
|
This script will convert .json guideline to .rst guideline.
|
|
|
|
Only schema 2.0 is supported
|
|
|
|
1. It is possible to convert single file with --file option.
|
|
This option takes filename (not file path) as argument.
|
|
(File has to be located either in the add-ons directory interop/add-ons/guidelines
|
|
directory or interop/guidelines directory)
|
|
|
|
2. It is possible to convert core guidelines + add-ons guidelines into
|
|
single file with --all option.
|
|
This option takes date of guideline release as argument.
|
|
|
|
3. It is possible to specify output directory with --outdir option.
|
|
This option takes path to output directory as argument.
|
|
If this option isn't used, the file will be stored in interop/doc/source/guidelines
|
|
|
|
Examples:
|
|
|
|
[Generating out.2020.11.rst file to interop/doc/source/guidelines directory]
|
|
python3 jsonToRst.py --file 2020.11.json
|
|
|
|
[Generating all.2020.11.rst file to interop/doc/source/guidelines directory
|
|
(core + add-ons)]
|
|
python3 jsonToRst.py --all 2020.11
|
|
|
|
[Generating out.2020.11.rst file and out.dns.2020.11.rst file to
|
|
interop/doc/source/guidelines directory]
|
|
python3 jsonToRst.py --file 2020.11.json --file dns.2020.11.json
|
|
|
|
[Generating out.2020.11.rst file to current directory]
|
|
python3 jsonToRst.py --file 2020.11.json --outdir .
|
|
"""
|
|
|
|
import argparse
|
|
import json
|
|
from json.decoder import JSONDecodeError
|
|
import os
|
|
import sys
|
|
import textwrap
|
|
|
|
|
|
def print_help_arrays(input):
    """Join an iterable of strings into a capitalized, comma-separated string.

    :param input: iterable of strings (e.g. release names), or None/empty
    :returns: a string like ``'Ussuri, Victoria'``, or None when *input*
        is empty or None
    """
    # NOTE: the parameter name shadows the builtin ``input``; it is kept
    # unchanged for backward compatibility with existing callers.
    if not input:
        return None
    # str.join replaces the old concatenate-then-slice-off-the-trailing
    # separator loop.
    return ', '.join(i.capitalize() for i in input)
|
|
|
|
|
|
def print_error(msg):
    """Print *msg* and terminate the script with exit status 1."""
    print(msg)
    raise SystemExit(1)
|
|
|
|
|
|
def parse_arguments():
    """Build the command-line parser and return the parsed arguments.

    :returns: argparse.Namespace with ``file`` (list or None), ``all``
        (date string or None) and ``outdir`` attributes
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    guidelines_dir = os.path.join(script_dir, "../doc/source/guidelines/")

    parser = argparse.ArgumentParser(__doc__)
    parser.add_argument(
        '--file',
        action='append',
        help='Creates guideline for single file')
    parser.add_argument(
        '--all',
        metavar='DATE',
        help='Creates complete guideline(core + add-ons)')
    parser.add_argument(
        '--outdir',
        metavar='FILENAME',
        default=guidelines_dir,
        help='Path to output file')
    return parser.parse_args()
|
|
|
|
|
|
def get_file_path(in_file_name):
    """Resolve a guideline file name to an existing path.

    The file is looked up first in ``<repo>/guidelines/`` and then in
    ``<repo>/add-ons/guidelines/``.

    :param in_file_name: bare file name (not a path), e.g. '2020.11.json'
    :returns: full path of the file, or None when it exists in neither
        directory
    """
    # Repo root is two levels above this script (<repo>/tools/jsonToRst.py).
    # os.path.dirname is more robust than the previous
    # str.replace('/tools/jsonToRst.py', ''), which silently failed if the
    # script was renamed or moved.
    interop_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

    # Platform guidelines take precedence over add-on guidelines.
    candidate_dirs = (
        os.path.join(interop_path, 'guidelines'),
        os.path.join(interop_path, 'add-ons', 'guidelines'),
    )

    for directory in candidate_dirs:
        candidate = os.path.join(directory, in_file_name)
        if os.path.isfile(candidate):
            return candidate
    return None
|
|
|
|
|
|
def write_intro(data, out_file):
    """Write the guideline title block and introduction to *out_file*.

    :param data: parsed guideline JSON (schema 2.0) as a dict
    :param out_file: writable text stream receiving the RST output
    """
    metadata = data.get('metadata')

    # Exit with a readable message instead of an AttributeError when the
    # JSON has no metadata section at all.
    if metadata is None or metadata.get('id') is None:
        print_error('Make sure there is a valid id')

    line01 = "OpenStack Interoperability Guideline %s" % metadata["id"]

    # Overlined + underlined RST title, sized to the title text.
    out_file.write('=' * len(line01) + '\n')
    out_file.write(line01 + '\n')
    out_file.write('=' * len(line01) + '\n')
    out_file.write("""
:Status: {status}
:Replaces: {replaces}
:JSON Master: {source}

This document outlines the mandatory capabilities and designated
sections required to exist in a software installation in order to
be eligible to use marks controlled by the OpenStack Foundation.

This document was generated from the `<{id}.json>`_.

Releases Covered
==============================
Applies to {releases}

""".format(status=metadata["os_trademark_approval"].get("status"),
           replaces=metadata["os_trademark_approval"].get("replaces"),
           source=metadata.get("source"),
           id=metadata.get("id"),
           releases=print_help_arrays(
               metadata["os_trademark_approval"].get("releases"))))
|
|
|
|
|
|
def write_components(data, out_file):
    """Write one 'X Component Capabilities' section per component.

    Components are emitted in sorted order; within each component the
    capability lists appear in fixed lifecycle order (required, advisory,
    deprecated, removed).

    :param data: parsed guideline JSON (schema 2.0) as a dict
    :param out_file: writable text stream receiving the RST output
    """
    if data.get('components') is None:
        print_error("No components found")

    components = sorted(data["components"].keys())
    order = ["required", "advisory", "deprecated", "removed"]
    for component in components:
        out_file.write("""



{component} Component Capabilities
""".format(component=component.capitalize()))
        # +23 is len(' Component Capabilities'), so the '=' rule spans the
        # whole heading.
        out_file.write('=' * (len(component) + 23))

        for event in order:
            out_file.write("\n{event} Capabilities\n".format(
                event=event.capitalize()))
            # +15 keeps the dash rule at least as long as the heading.
            out_file.write("-" * (len(event) + 15) + "\n")

            if not data['components'][component]['capabilities'][event]:
                out_file.write("None\n")

            for req in data['components'][component]['capabilities'][event]:
                # Skip capabilities the component references but the
                # top-level capabilities table does not define (an empty
                # default keeps the old warn-and-continue behavior when the
                # table is missing entirely).
                if req not in data.get('capabilities', {}):
                    print("[WARNING] " + event + " section doesn't exist in " +
                          "capabilities")
                    continue

                out_file.write("* {name} ({project})\n".format(
                    name=req,
                    project=data["capabilities"][req].get(
                        "project").capitalize()))
|
|
|
|
|
|
def write_designated_sections(data, out_file):
    """Write the 'Designated Sections' part of the guideline.

    Groups components by their designated-section status (required,
    advisory, deprecated, removed) and emits one sub-section per status,
    with each component's guidance text wrapped at 79 columns.

    :param data: parsed guideline JSON (schema 2.0) as a dict
    :param out_file: writable text stream receiving the RST output
    """
    wrapper = textwrap.TextWrapper(width=79, subsequent_indent=' ')

    if 'designated_sections' not in data:
        print_error("designated_sections not in json file")

    out_file.write("""

Designated Sections
=====================================

The following designated sections apply to the same releases as
this specification.""")
    order = ['required', 'advisory', 'deprecated', 'removed']
    components = data.get("designated_sections")

    # Invert the mapping: status name -> list of components having it.
    # NOTE(review): only the first status key of each component is used —
    # this assumes one status per component; confirm against the schema.
    sections_components = {}
    for component in components:
        section = list(data["designated_sections"].get(component).keys())[0]
        sections_components.setdefault(section, []).append(component)

    for event in order:
        out_file.write('\n\n{event} Designated Sections\n'.format(
            event=event.capitalize()))
        # +20 is for length of header
        out_file.write('-' * (len(event) + 20) + '\n\n')

        if event not in sections_components:
            out_file.write('None')
            continue

        names = sorted(sections_components[event])
        outlines = [
            wrapper.fill(
                "* {name} : {guide}".format(
                    name=name.capitalize(),
                    guide=components[name][event].get('guidance')))
            for name in names
        ]
        out_file.write("\n".join(outlines))

    out_file.write('\n')
|
|
|
|
|
|
def run(in_file_names, out_file_path):
    """Convert every readable guideline in *in_file_names* to RST.

    All converted guidelines are written, in order, into the single file
    *out_file_path*; inputs that are missing or not valid JSON are skipped
    with a warning. If nothing at all was converted, the (empty) output
    file is removed again.
    """
    with open(out_file_path, "w") as target:
        for name in in_file_names:
            source_path = get_file_path(name)

            if source_path is None:
                print(f'[WARNING] File {name} does not exist! SKIPPING')
                continue

            print(f'[ INFO ] Reading from: {source_path}')

            with open(source_path) as source:
                try:
                    data = json.load(source)
                except JSONDecodeError:
                    print(f'[WARNING] Make sure {source_path}'
                          f' is a valid JSON file! SKIPPING')
                    continue

            print(f'[ INFO ] Writing to: {out_file_path}')

            # Guideline sections, in document order.
            write_intro(data, target)
            write_components(data, target)
            write_designated_sections(data, target)

    # Every input was skipped: drop the useless empty output file.
    if os.path.getsize(out_file_path) == 0:
        print('[ ERROR ] Output file is empty. REMOVING FILE')
        os.remove(out_file_path)
|
|
|
|
|
|
if __name__ == '__main__':
    args = parse_arguments()

    # Single-file mode: one output guideline per --file argument.
    if args.file is not None:
        for file_name in args.file:
            target = os.path.join(args.outdir,
                                  "out." + file_name.replace("json", "rst"))
            run([file_name], target)

    # Combined mode: core guideline plus every add-on for one date.
    if args.all is not None:
        date = args.all

        # Expected input file names for the given release date.
        input_files = [
            date + ".json",
            "dns." + date + ".json",
            "orchestration." + date + ".json",
            "shared_file_system." + date + ".json",
        ]

        run(input_files, os.path.join(args.outdir, "all." + date + ".rst"))