Remove extraneous list casting
parent 9e0fafbcd4
commit ebc7db12fb

9 changed files with 27 additions and 27 deletions
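The pattern removed throughout is the list(...) wrapper around dict.items(), dict.keys() and dict.values() at call sites that only iterate over the result. The sketch below is illustrative rather than code from this repository (the helper names print_substitutions and drop_empty_values are made up): in Python 3 these methods return view objects that are directly iterable, so the extra copy buys nothing, while a list() copy is still the right choice when the loop mutates the dict it is walking.

# Illustrative sketch, not code from this commit: why list() is extraneous
# when a dict is only iterated, and when a copy is still needed.
# The helper names below are hypothetical.

def print_substitutions(substitutions):
    # dict.items() returns a view in Python 3; it is directly iterable,
    # so wrapping it in list() only builds a throwaway copy.
    for key, value in substitutions.items():
        print("--substitution=%s=%s" % (key, value))

def drop_empty_values(mapping):
    # A list() copy is still required when the loop mutates the dict:
    # changing a dict's size while iterating a live view raises RuntimeError.
    for key in list(mapping.keys()):
        if not mapping[key]:
            del mapping[key]
    return mapping

print_substitutions({"%CLIENT_MAJOR_VERSION%": "r0"})
drop_empty_values({"a": 1, "b": None})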
@@ -88,9 +88,9 @@ def check_swagger_file(filepath):
     with open(filepath) as f:
         swagger = yaml.load(f)

-    for path, path_api in list(swagger.get('paths', {}).items()):
+    for path, path_api in swagger.get('paths', {}).items():

-        for method, request_api in list(path_api.items()):
+        for method, request_api in path_api.items():
             request = "%s %s" % (method.upper(), path)
             for parameter in request_api.get('parameters', ()):
                 if parameter['in'] == 'body':

@@ -100,7 +100,7 @@ def check_swagger_file(filepath):
                 responses = request_api['responses']
             except KeyError:
                 raise ValueError("No responses for %r" % (request,))
-            for code, response in list(responses.items()):
+            for code, response in responses.items():
                 check_response(filepath, request, code, response)

@@ -94,11 +94,11 @@ for filename in os.listdir(cs_api_dir):
         api = units.resolve_references(filepath, api)

         basePath = api['basePath']
-        for path, methods in list(api["paths"].items()):
+        for path, methods in api["paths"].items():
             path = (basePath + path).replace('%CLIENT_MAJOR_VERSION%',
                                              major_version)
-            for method, spec in list(methods.items()):
-                if "tags" in list(spec.keys()):
+            for method, spec in methods.items():
+                if "tags" in spec.keys():
                     if path not in output["paths"]:
                         output["paths"][path] = {}
                     output["paths"][path][method] = spec

@@ -299,7 +299,7 @@ def run_through_template(input_files, set_verbose, substitutions):
         "-i", "matrix_templates",
     ]

-    for k, v in list(substitutions.items()):
+    for k, v in substitutions.items():
         args.append("--substitution=%s=%s" % (k, v))

     if set_verbose:

@@ -359,7 +359,7 @@ def get_build_target(all_targets, target_name):
     for i, entry in enumerate(group):
         if isinstance(entry, dict):
             group[i] = {
-                (rel_depth + depth): v for (rel_depth, v) in list(entry.items())
+                (rel_depth + depth): v for (rel_depth, v) in entry.items()
             }
     return group

@@ -378,7 +378,7 @@ def get_build_target(all_targets, target_name):
         # file_entry is a dict which has more file entries as values
         elif isinstance(file_entry, dict):
             resolved_entry = {}
-            for (depth, entry) in list(file_entry.items()):
+            for (depth, entry) in file_entry.items():
                 if not isinstance(entry, str):
                     raise Exception(
                         "Double-nested depths are not supported. Entry: %s" % (file_entry,)

@@ -429,7 +429,7 @@ def main(targets, dest_dir, keep_intermediates, substitutions):
         target_defs = yaml.load(targ_file.read())

     if targets == ["all"]:
-        targets = list(target_defs["targets"].keys())
+        targets = target_defs["targets"].keys()

     log("Building spec [targets=%s]" % targets)

@@ -443,17 +443,17 @@ def main(targets, dest_dir, keep_intermediates, substitutions):
         templated_files[target_name] = templated_file

     # we do all the templating at once, because it's slow
-    run_through_template(list(templated_files.values()), VERBOSE, substitutions)
+    run_through_template(templated_files.values(), VERBOSE, substitutions)

     stylesheets = glob.glob(os.path.join(script_dir, "css", "*.css"))

-    for target_name, templated_file in list(templated_files.items()):
+    for target_name, templated_file in templated_files.items():
         target = target_defs["targets"].get(target_name)
         version_label = None
         if target:
             version_label = target.get("version_label")
             if version_label:
-                for old, new in list(substitutions.items()):
+                for old, new in substitutions.items():
                     version_label = version_label.replace(old, new)

         rst_file = os.path.join(tmp_dir, "spec_%s.rst" % (target_name,))

@@ -481,7 +481,7 @@ def main(targets, dest_dir, keep_intermediates, substitutions):
 def list_targets():
     with open(os.path.join(spec_dir, "targets.yaml"), "r") as targ_file:
         target_defs = yaml.load(targ_file.read())
-    targets = list(target_defs["targets"].keys())
+    targets = target_defs["targets"].keys()
     print("\n".join(targets))

@@ -14,7 +14,7 @@ prs = set()
 def getpage(url, page):
     resp = requests.get(url + str(page))

-    for link in list(resp.links.values()):
+    for link in resp.links.values():
         if link['rel'] == 'last':
             pagecount = re.search('page=(.+?)', link['url']).group(1)

@@ -24,7 +24,7 @@ class AccessKeyStore(object):
         self.accessed_set = set()

     def keys(self):
-        return list(self.data.keys())
+        return self.data.keys()

     def add(self, key, unit_dict):
         self.data[key] = unit_dict

@@ -54,7 +54,7 @@ class Sections(object):
                 )
             elif isinstance(section, dict):
                 self.log(" Generated multiple sections:")
-                for (k, v) in list(section.items()):
+                for (k, v) in section.items():
                     if not isinstance(k, str) or not isinstance(v, str):
                         raise Exception(
                             ("Method %s returned multiple sections as a dict but " +

@@ -169,7 +169,7 @@ def main(input_module, files=None, out_dir=None, verbose=False, substitutions={}
     # print out valid section keys if no file supplied
     if not files:
         print("\nValid template variables:")
-        for key in list(sections.keys()):
+        for key in sections.keys():
             sec_text = "" if (len(sections[key]) > 75) else (
                 "(Value: '%s')" % sections[key]
             )

@@ -211,7 +211,7 @@ def process_file(env, sections, filename, output_filename):
     # Do these substitutions outside of the ordinary templating system because
     # we want them to apply to things like the underlying swagger used to
     # generate the templates, not just the top-level sections.
-    for old, new in list(substitutions.items()):
+    for old, new in substitutions.items():
         output = output.replace(old, new)

     with open(output_filename, "wb") as f:

@@ -86,7 +86,7 @@ class MatrixSections(Sections):
     # the key is the section name and the value is the value of the section
     def render_group_http_apis(self):
         # map all swagger_apis to the form $GROUP_http_api
-        swagger_groups = list(self.units.get("swagger_apis").keys())
+        swagger_groups = self.units.get("swagger_apis").keys()
         renders = {}
         for group in swagger_groups:
             sortFnOrPathList = None

@@ -134,7 +134,7 @@ class MatrixSections(Sections):
             "m.room.message#m.file"
         ]
         other_msgtypes = [
-            k for k in list(schemas.keys()) if k.startswith("m.room.message#") and
+            k for k in schemas.keys() if k.startswith("m.room.message#") and
             k not in msgtype_order
         ]
         for event_name in (msgtype_order + other_msgtypes):

@@ -35,7 +35,7 @@ from functools import reduce
 from six.moves.urllib.parse import urlencode

 matrix_doc_dir=reduce(lambda acc,_: os.path.dirname(acc),
-                      list(range(1, 5)), os.path.abspath(__file__))
+                      range(1, 5), os.path.abspath(__file__))

 HTTP_APIS = {
     os.path.join(matrix_doc_dir, "api/application-service"): "as",

@@ -126,7 +126,7 @@ def resolve_references(path, schema):
         else:
             result = OrderedDict()

-        for key, value in list(schema.items()):
+        for key, value in schema.items():
             result[key] = resolve_references(path, value)
         return result
     elif isinstance(schema, list):

@@ -211,7 +211,7 @@ def get_json_schema_object_fields(obj, enforce_title=False):
         props = obj.get("patternProperties")
         if props:
             # try to replace horrible regex key names with pretty x-pattern ones
-            for key_name in list(props.keys()):
+            for key_name in props.keys():
                 pretty_key = props[key_name].get("x-pattern")
                 if pretty_key:
                     props[pretty_key] = props[key_name]

@@ -382,7 +382,7 @@ def get_example_for_schema(schema):
         if 'properties' not in schema:
             raise Exception('"object" property has neither properties nor example')
         res = OrderedDict()
-        for prop_name, prop in list(schema['properties'].items()):
+        for prop_name, prop in schema['properties'].items():
             logger.debug("Parsing property %r" % prop_name)
             prop_example = get_example_for_schema(prop)
             res[prop_name] = prop_example

@@ -558,7 +558,7 @@ class MatrixUnits(Units):
             )
         if "headers" in good_response:
             headers = TypeTable()
-            for (header_name, header) in list(good_response["headers"].items()):
+            for (header_name, header) in good_response["headers"].items():
                 headers.add_row(
                     TypeTableRow(key=header_name, title=header["type"],
                                  desc=header["description"]),

@@ -617,7 +617,7 @@ class MatrixUnits(Units):

     def load_swagger_apis(self):
         apis = {}
-        for path, suffix in list(HTTP_APIS.items()):
+        for path, suffix in HTTP_APIS.items():
             for filename in os.listdir(path):
                 if not filename.endswith(".yaml"):
                     continue
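One general caveat, illustrated below with a throwaway example rather than code from this commit: in Python 3 the keys(), values() and items() methods return live view objects, not lists, so any call site that needs indexing, slicing or a stable snapshot of the mapping should keep an explicit list() around them.

# Illustrative sketch of Python 3 dict views; the 'targets' dict here is
# made up and not taken from this repository.
targets = {"main": {}, "unstable": {}}
names = targets.keys()            # a live view: no indexing, reflects changes
targets["r0"] = {}
assert "r0" in names              # the view sees the key added afterwards
snapshot = list(targets.keys())   # materialise when a real list is needed
print(snapshot[0])                # e.g. indexing only works on the list copy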