Mirror of https://github.com/OMGeeky/google-apis-rs.git (synced 2026-01-07 03:56:42 +01:00)
Merge branch 'python_fixes'
2  .gitignore  (vendored)

@@ -5,7 +5,7 @@
gen/doc
*.go
**/target/
**/docs/
gen/**/docs/
**/build_html/
.*.deps
**/Cargo.lock
14  Makefile

@@ -18,7 +18,9 @@ TPL := $(PYTHON) $(MAKO_RENDER)
MKDOCS := $(shell pwd)/$(VENV_DIR)/bin/mkdocs
GHP_IMPORT := $(shell pwd)/$(VENV_DIR)/bin/ghp-import

MAKO_SRC = src/mako
GEN_SRC = src/generator
GEN_LIB_SRC = $(GEN_SRC)/lib
MAKO_SRC = src/generator/templates
RUST_SRC = src/rust
PREPROC_DIR = $(RUST_SRC)/preproc
PREPROC = target/release/preproc

@@ -36,9 +38,8 @@ else
API_LIST := $(API_LIST)api-list.yaml
endif
API_JSON_FILES = $(shell find etc -type f -name '*-api.json')
MAKO_LIB_DIR = $(MAKO_SRC)/lib
MAKO_LIB_FILES = $(shell find $(MAKO_LIB_DIR) -type f -name '*.*')
MAKO = export PREPROC=$(PREPROC); export PYTHONPATH=$(MAKO_LIB_DIR):$(PYTHONPATH); $(TPL) --template-dir '.'
MAKO_LIB_FILES = $(shell find $(GEN_LIB_SRC) -type f -name '*.*')
MAKO = export PREPROC=$(PREPROC); export PYTHONPATH=src:$(PYTHONPATH); $(TPL) --template-dir '.'
MAKO_STANDARD_DEPENDENCIES = $(API_SHARED_INFO) $(MAKO_LIB_FILES) $(MAKO_RENDER) $(PREPROC)

help:

@@ -71,7 +72,7 @@ $(PYTHON_BIN): $(VENV_BIN) requirements.txt
python3 -m virtualenv -p python3 $(VENV_DIR)
$@ -m pip install -r requirements.txt

$(MAKO_RENDER): $(PYTHON_BIN) $(wildcard $(MAKO_LIB_DIR)/*)
$(MAKO_RENDER): $(PYTHON_BIN) $(wildcard $(GEN_LIB_SRC)/*)

# Explicitly NOT depending on $(MAKO_LIB_FILES), as it's quite stable and now takes 'too long' thanks
# to a URL get call to the google discovery service

@@ -98,6 +99,9 @@ test-gen: $(PYTHON_BIN)

test: test-gen

typecheck: $(PYTHON_BIN)
$(PYTHON) -m pyright $(GEN_LIB_SRC)

clean: clean-all-api clean-all-cli docs-all-clean
-rm -Rf $(VENV_DIR)
-rm $(API_DEPS) $(CLI_DEPS)
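Note: the PYTHONPATH change above is what lets the generator code be imported as a regular package instead of as loose modules on the path. A minimal sketch of the idea, assuming the src/generator/lib package layout implied by the rest of this diff (the __init__.py files themselves are not shown here):

import sys

# what `export PYTHONPATH=src` achieves for the mako-render step: with src/ on
# the module search path, src/generator/lib/util.py resolves as a package module
sys.path.insert(0, "src")

import generator.lib.util as util  # the import style used throughout the updated templates and tests
print(util.__name__)  # 'generator.lib.util'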
@@ -83,7 +83,7 @@ directories:
# where are all the API meta files
api_base: etc/api
# all mako source files
mako_src: src/mako
mako_src: src/generator/templates
# The subdirectory to contain documentation from all APIs and related programs
doc_subdir: doc
cargo:

@@ -5,7 +5,7 @@ mkdocs:
# if docs_dir changes, remember to update the sources as well.
docs_dir: docs
mako:
post_processor_module: cli
post_processor_module: "generator.lib.cli"
make:
id: cli
target_name: CLIs
@@ -4,9 +4,13 @@
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from argparse import ArgumentParser
from copy import deepcopy
from importlib import import_module
from os.path import isfile, dirname
import json
import os
import sys
import yaml
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions

@@ -212,27 +216,17 @@ def load_data(datafiles):
:Returns: data (dict)
:Raises: ImportError, ValueError
"""
imported_json = False
imported_yaml = False
mydata = {}

for filename, namespace in datafiles:
data = None
if filename[-5:].lower() == ".json":
if not imported_json:
try:
import simplejson as json
except ImportError:
import json
imported_json = True
try:
data = json.load(open(filename, 'r'))
except ValueError as err:
raise ValueError("Invalid JSON in file '%s'. (%s)" % (filename, str(err)))
elif filename[-5:].lower() in (".yaml", ".yml"):
if not imported_yaml:
import yaml
imported_yaml = True
data = yaml.load_all(open(filename, 'r'), Loader=yaml.loader.FullLoader)
data = list(data)[0]
else:
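The loader simplification above drops the lazy simplejson/json/yaml imports and pins an explicit YAML loader. A minimal sketch of the resulting behaviour, assuming the top-level `import json` / `import yaml` from the first hunk (the function name here is illustrative, not from the script):

import json
import yaml

def load_one(filename):
    if filename.lower().endswith(".json"):
        with open(filename, "r") as fh:
            return json.load(fh)
    # Loader=yaml.loader.FullLoader avoids PyYAML's "no Loader" deprecation
    # warning (introduced in 5.1); load_all returns an iterator, so the first
    # document is taken explicitly, matching list(data)[0] above.
    with open(filename, "r") as fh:
        return list(yaml.load_all(fh, Loader=yaml.loader.FullLoader))[0]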
@@ -289,7 +283,7 @@ def cmdline(argv=None):
post_processor = lambda r, of: r
if options.post_process_python_module:
fn_name = 'process_template_result'
pm = __import__(options.post_process_python_module, globals(), locals(), [])
pm = import_module(options.post_process_python_module)
post_processor = getattr(pm, fn_name, None)
if post_processor is None:
raise AssertionError("python module '%s' must have a function called '%s'"
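The switch from __import__ to importlib.import_module matters now that the post-processor module name is dotted ("generator.lib.cli"): with an empty fromlist, __import__ returns the top-level package, so the getattr lookup would miss the hook. A small illustration (module names as used in the diff):

from importlib import import_module

pkg = __import__("generator.lib.cli", globals(), locals(), [])  # -> the 'generator' package
mod = import_module("generator.lib.cli")                        # -> the 'generator.lib.cli' module itself
post_processor = getattr(mod, "process_template_result", None)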
@@ -3,6 +3,7 @@ pyyaml<6
mkdocs==0.11
pytest
pytest-cov
importlib_resources
codecov
ghp-import
pyright
types-PyYAML
0  src/generator/lib/__tests__/__init__.py  (normal file)

1369  src/generator/lib/__tests__/context_test.py  (normal file)
File diff suppressed because it is too large

0  src/generator/lib/__tests__/test_data/__init__.py  (normal file)

@@ -1,3 +1,6 @@
# Discovery document for photoslibrary, retrieved from:
# https://photoslibrary.googleapis.com/$discovery/rest?version=v1
DISCOVERY_DOC = r"""
{
"documentationLink": "https://developers.google.com/photos/",
"id": "photoslibrary:v1",

@@ -1332,3 +1335,4 @@
"kind": "discovery#restDescription",
"basePath": ""
}
"""

@@ -2,16 +2,11 @@

import unittest
import json
import importlib_resources

from .util import to_api_version, library_name, re_find_replacements, to_rust_type, new_context
from . import test_data
from generator.lib.util import to_api_version, library_name, re_find_replacements, to_rust_type
from .test_data.discovery_document import DISCOVERY_DOC


def read_test_json_file(resource):
data = importlib_resources.read_text(test_data, resource)
return json.loads(data)

class UtilsTest(unittest.TestCase):

def test_to_version_ok(self):

@@ -65,7 +60,7 @@ class UtilsTest(unittest.TestCase):
self.assertEqual(ms[0], '{+project}')

def test_to_rust_type(self):
full_api_schema = read_test_json_file('photoslibrary-api.json')
full_api_schema = json.loads(DISCOVERY_DOC)

schemas = full_api_schema['schemas']
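The test module now pulls the photoslibrary discovery document from a module-level raw string instead of reading a packaged JSON resource, so the test no longer needs importlib_resources at import time. A usage sketch (the absolute module path is assumed from the new test layout, not spelled out in the diff):

import json

from generator.lib.__tests__.test_data.discovery_document import DISCOVERY_DOC

full_api_schema = json.loads(DISCOVERY_DOC)
schemas = full_api_schema["schemas"]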
@@ -1,4 +1,4 @@
import util
import generator.lib.util as util

import os
import re

@@ -239,7 +239,7 @@ def field_to_value(f):
return v

# split the result along split segments
def process_template_result(r, output_file):
def process_template_result(r, output_file: str):
found = False
dir = None
if output_file:

@@ -249,6 +249,8 @@ def process_template_result(r, output_file):
# end handle output directory

for m in re_splitters.finditer(r):
if not dir:
raise RuntimeError("Missing directory; was output_file specified?")
found = True
fh = open(os.path.join(dir, m.group(1)), 'wb')
fh.write(m.group(2).encode('UTF-8'))
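The added guard turns a would-be TypeError from os.path.join(None, ...) into an explicit error when split markers are found but no output directory could be derived from output_file. A condensed sketch of the loop (re_splitters and the surrounding state are as in the module above; the wrapper function here is only for illustration):

import os

def write_split_sections(result, dir, re_splitters):
    for m in re_splitters.finditer(result):
        if not dir:
            raise RuntimeError("Missing directory; was output_file specified?")
        # m.group(1) is the destination file name, m.group(2) the section body
        with open(os.path.join(dir, m.group(1)), "wb") as fh:
            fh.write(m.group(2).encode("UTF-8"))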
@@ -1,4 +1,4 @@
<%! import util %>\
<%! import generator.lib.util as util %>\

## source should be ${self.uri}
## you need to escape the output, using a filter for example

@@ -58,4 +58,4 @@ ${canonicalName}\

<%def name="program_name()" buffered="True">\
${util.program_name(name, version)}\
</%def>
</%def>
@@ -1,18 +1,20 @@
import re
import os
from random import (randint, random, choice, seed)
import collections
from copy import deepcopy
import re
import subprocess

from dataclasses import dataclass
from random import (randint, random, choice, seed)
from typing import Any, Dict, List, Mapping, Tuple
from copy import deepcopy

seed(1337)

re_linestart = re.compile('^', flags=re.MULTILINE)
re_spaces_after_newline = re.compile('^ {4}', flags=re.MULTILINE)
re_first_4_spaces = re.compile('^ {1,4}', flags=re.MULTILINE)
re_desc_parts = re.compile("((the part (names|properties) that you can include in the parameter value are)|(supported values are ))(.*?)\.", flags=re.IGNORECASE|re.MULTILINE)
re_desc_parts = re.compile(r"((the part (names|properties) that you can include in the parameter value are)|(supported values are ))(.*?)\.", flags=re.IGNORECASE|re.MULTILINE)

re_find_replacements = re.compile("\{[/\+]?\w+\*?\}")
re_find_replacements = re.compile(r"\{[/\+]?\w+\*?\}")

HTTP_METHODS = set(("OPTIONS", "GET", "POST", "PUT", "DELETE", "HEAD", "TRACE", "CONNECT", "PATCH" ))
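The two regex changes in this hunk only add the raw-string prefix: sequences such as \{, \+ and \d are invalid escapes in a normal string literal and trigger DeprecationWarning on current Python versions, while the compiled pattern stays identical. A quick check:

import re

re_find_replacements = re.compile(r"\{[/\+]?\w+\*?\}")
print(re_find_replacements.findall("files/{fileId}/comments/{commentId}"))
# ['{fileId}', '{commentId}']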
@@ -120,9 +122,7 @@ def items(p):
else:
return p._items()

def custom_sorted(p):
if not isinstance(p, list):
assert(false, p, "unexpected type")
def custom_sorted(p: List[Mapping[str, Any]]) -> List[Mapping[str, Any]]:
return sorted(p, key = lambda p: p['name'])

# ==============================================================================
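For reference, the replaced guard referenced an undefined lowercase `false` and so could never work as intended; the new version simply drops it and annotates the expected input. The kept body, runnable on its own:

from typing import Any, List, Mapping

def custom_sorted(p: List[Mapping[str, Any]]) -> List[Mapping[str, Any]]:
    return sorted(p, key=lambda e: e['name'])

print(custom_sorted([{'name': 'snippet'}, {'name': 'id'}]))
# [{'name': 'id'}, {'name': 'snippet'}]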
@@ -342,11 +342,16 @@ def _assure_unique_type_name(schemas, tn):
return tn

# map a json type to an rust type
# sn = schema name
# pn = property name
# t = type dict
# NOTE: In case you don't understand how this algorithm really works ... me neither - THE AUTHOR
def to_rust_type(schemas, sn, pn, t, allow_optionals=True, _is_recursive=False):
def to_rust_type(
schemas,
schema_name,
property_name,
t,
allow_optionals=True,
_is_recursive=False
):
def nested_type(nt):
if 'items' in nt:
nt = nt['items']

@@ -355,8 +360,8 @@ def to_rust_type(schemas, sn, pn, t, allow_optionals=True, _is_recursive=False):
else:
assert(is_nested_type_property(nt))
# It's a nested type - we take it literally like $ref, but generate a name for the type ourselves
return _assure_unique_type_name(schemas, nested_type_name(sn, pn))
return to_rust_type(schemas, sn, pn, nt, allow_optionals=False, _is_recursive=True)
return _assure_unique_type_name(schemas, nested_type_name(schema_name, property_name))
return to_rust_type(schemas, schema_name, property_name, nt, allow_optionals=False, _is_recursive=True)

def wrap_type(tn):
if allow_optionals:

@@ -369,7 +374,7 @@ def to_rust_type(schemas, sn, pn, t, allow_optionals=True, _is_recursive=False):
# which is fine for now. 'allow_optionals' implicitly restricts type boxing for simple types - it
# usually is on on the first call, and off when recursion is involved.
tn = t[TREF]
if not _is_recursive and tn == sn:
if not _is_recursive and tn == schema_name:
tn = 'Option<Box<%s>>' % tn
return wrap_type(tn)
try:
@@ -490,8 +495,7 @@ def is_schema_with_optionals(schema_markers):
# -------------------------
## @name Activity Utilities
# @{
# return (category, name|None, method)
def activity_split(fqan):
def activity_split(fqan: str) -> Tuple[str, str, str]:
t = fqan.split('.')
mt = t[2:]
if not mt:
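activity_split takes a fully qualified activity name of the form api.resource.method and, per the new annotation, always yields a three-part tuple; ids without a resource segment get special treatment via METHODS_RESOURCE later in build_activity_mappings. A usage sketch (the example id is illustrative, not taken from this diff):

from generator.lib.util import activity_split

category, resource, method = activity_split("photoslibrary.albums.list")
# expected: ("photoslibrary", "albums", "list")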
@@ -513,10 +517,6 @@ def to_fqan(name, resource, method):
def activity_name_to_type_name(an):
return canonical_type_name(an)[:-1]

# yields (category, resource, activity, activity_data)
def iter_acitivities(c):
return ((activity_split(an) + [a]) for an, a in c.fqan_map.items())

# return a list of parameter structures of all params of the given method dict
# apply a prune filter to restrict the set of returned parameters.
# The order will always be: partOrder + alpha
@@ -663,31 +663,37 @@ It should be used to handle progress information, and to implement a certain lev
## -- End Activity Utilities -- @}


Context = collections.namedtuple('Context', ['sta_map', 'fqan_map', 'rta_map', 'rtc_map', 'schemas'])
@dataclass
class Context:
sta_map: Dict[str, Any]
fqan_map: Dict[str, Any]
rta_map: Dict[str, Any]
rtc_map: Dict[str, Any]
schemas: Dict[str, Any]

# return a newly build context from the given data
def new_context(schemas, resources, methods):
def new_context(schemas: Dict[str, Dict[str, Any]], resources: Dict[str, Any]) -> Context:
# Returns (A, B) where
# A: { SchemaTypeName -> { fqan -> ['request'|'response', ...]}
# B: { fqan -> activity_method_data }
# fqan = fully qualified activity name
def build_activity_mappings(activities, res = None, fqan = None):
def build_activity_mappings(resources: Dict[str, Any], res = None, fqan = None) -> Tuple[Dict[str, Any], Dict[str, Any]]:
if res is None:
res = dict()
if fqan is None:
fqan = dict()
for k,a in activities.items():
for k,a in resources.items():
if 'resources' in a:
build_activity_mappings(a.resources, res, fqan)
build_activity_mappings(a["resources"], res, fqan)
if 'methods' not in a:
continue
for mn, m in a.methods.items():
assert m.id not in fqan
category, resource, method = activity_split(m.id)
for mn, m in a["methods"].items():
assert m["id"] not in fqan
category, resource, method = activity_split(m["id"])
# This may be another name by which people try to find the method.
# As it has no resource, we put in a 'fake resource' (METHODS_RESOURCE), which
# needs some special treatment only in key-spots
fqan_key = m.id
fqan_key = m["id"]
if resource == METHODS_RESOURCE:
fqan_key = to_fqan(category, resource, method)
fqan[fqan_key] = m
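Context moves from a collections.namedtuple to a dataclass with the same five fields; construction stays positional and attribute access (c.schemas, c.fqan_map, ...) is unchanged, but the fields now carry annotations that pyright can check. A minimal standalone sketch:

from dataclasses import dataclass
from typing import Any, Dict

@dataclass
class Context:
    sta_map: Dict[str, Any]
    fqan_map: Dict[str, Any]
    rta_map: Dict[str, Any]
    rtc_map: Dict[str, Any]
    schemas: Dict[str, Any]

empty = Context(dict(), dict(), dict(), dict(), dict())
print(empty.schemas)  # {}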
@@ -697,7 +703,7 @@ def new_context(schemas, resources, methods):
continue
tn = to_rust_type(schemas, None, None, t, allow_optionals=False)
info = res.setdefault(tn, dict())
io_info = info.setdefault(m.id, [])
io_info = info.setdefault(m["id"], [])
io_info.append(in_out_type_name)
# end for each io type

@@ -707,8 +713,8 @@ def new_context(schemas, resources, methods):
# the latter is used to deduce the resource name
tn = activity_name_to_type_name(resource)
info = res.setdefault(tn, dict())
if m.id not in info:
info.setdefault(m.id, [])
if m["id"] not in info:
info.setdefault(m["id"], [])
# end handle other cases
# end for each method
# end for each activity
@@ -717,7 +723,7 @@ def new_context(schemas, resources, methods):

# A dict of {s.id -> schema} , with all schemas having the 'parents' key set with [s.id, ...] of all parents
# in order of traversal, [-1] is first parent, [0] is the root of them all
def build_schema_map():
def build_schema_map() -> Dict[str, Any]:
# 'type' in t and t.type == 'object' and 'properties' in t or ('items' in t and 'properties' in t.items)
PARENT = 'parents'
USED_BY = 'used_by'

@@ -729,8 +735,8 @@ def new_context(schemas, resources, methods):
def link_used(s, rs):
if TREF in s:
l = assure_list(all_schemas[s[TREF]], USED_BY)
if rs.id not in l:
l.append(rs.id)
if rs["id"] not in l:
l.append(rs["id"])

def append_unique(l, s):
if s not in l:
@@ -738,14 +744,14 @@ def new_context(schemas, resources, methods):
return l

all_schemas = deepcopy(schemas)
def recurse_properties(prefix, rs, s, parent_ids):
def recurse_properties(prefix: str, rs: Any, s: Any, parent_ids: List[str]):
assure_list(s, USED_BY)
assure_list(s, PARENT).extend(parent_ids)
link_used(s, rs)

if is_nested_type_property(s) and 'id' not in s:
s.id = prefix
all_schemas[s.id] = s
all_schemas[s["id"]] = s
rs = s
# end this is already a perfectly valid type
@@ -756,61 +762,53 @@ def new_context(schemas, resources, methods):
if is_nested_type_property(p):
ns = deepcopy(p)
ns.id = _assure_unique_type_name(schemas, nested_type_name(prefix, pn))
all_schemas[ns.id] = ns
all_schemas[ns["id"]] = ns

# To allow us recursing arrays, we simply put items one level up
if 'items' in p:
ns.update((k, deepcopy(v)) for k, v in p.items.items())
ns.update((k, deepcopy(v)) for k, v in p["items"].items())

recurse_properties(ns.id, ns, ns, append_unique(parent_ids, rs.id))
recurse_properties(ns.id, ns, ns, append_unique(parent_ids, rs["id"]))
elif is_map_prop(p):
recurse_properties(nested_type_name(prefix, pn), rs,
p.additionalProperties, append_unique(parent_ids, rs.id))
p["additionalProperties"], append_unique(parent_ids, rs["id"]))
elif 'items' in p:
recurse_properties(nested_type_name(prefix, pn), rs,
p.items, append_unique(parent_ids, rs.id))
elif 'variant' in p:
for enum in p.variant.map:
recurse_properties(prefix, rs, enum, parent_ids)
p["items"], append_unique(parent_ids, rs["id"]))
# end handle prop itself
# end for each property
# end utility
for s in all_schemas.values():
recurse_properties(s.id, s, s, [])
recurse_properties(s["id"], s, s, [])
# end for each schema

return all_schemas
# end utility

all_schemas = schemas and build_schema_map() or dict()
if not (resources or methods):
if not resources:
return Context(dict(), dict(), dict(), dict(), all_schemas)

rta_map, rtc_map, sta_map, fqan_map = dict(), dict(), dict(), dict()
rta_map: Dict[str, Any] = {}
rtc_map: Dict[str, Any] = {}
sta_map: Dict[str, Any] = {}
fqan_map: Dict[str, Any] = {}

sources = list()
if bool(resources):
sources.append(resources)
if bool(methods):
sources.append({None : type(methods)({'methods' : methods})})

for data_source in sources:
_sta_map, _fqan_map = build_activity_mappings(data_source)
for an in _fqan_map:
category, resource, activity = activity_split(an)
rta_map.setdefault(resource, list()).append(activity)
assert rtc_map.setdefault(resource, category) == category
# end for each fqan
sta_map.update(_sta_map)
fqan_map.update(_fqan_map)
# end for each data source
_sta_map, _fqan_map = build_activity_mappings(resources)
for an in _fqan_map:
category, resource, activity = activity_split(an)
rta_map.setdefault(resource, list()).append(activity)
assert rtc_map.setdefault(resource, category) == category
# end for each fqan
sta_map.update(_sta_map)
fqan_map.update(_fqan_map)
return Context(sta_map, fqan_map, rta_map, rtc_map, all_schemas)

def _is_special_version(v):
return v.endswith('alpha') or v.endswith('beta')

def to_api_version(v):
m = re.search("_?v(\d(\.\d)*)_?", v)
m = re.search(r"_?v(\d(\.\d)*)_?", v)
if not m and _is_special_version(v):
return v
assert m, "Expected to find a version within '%s'" % v
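As with the earlier patterns, the to_api_version change is only the raw-string prefix; the extraction itself is unchanged. A quick check of what the pattern captures (the input string is illustrative):

import re

m = re.search(r"_?v(\d(\.\d)*)_?", "v1.3")
print(m.group(1))  # '1.3'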
@@ -1,5 +1,5 @@
<%! from util import (estr, enclose_in, hash_comment, library_to_crate_name, to_extern_crate_name) %>\
<%namespace name="util" file="lib/util.mako"/>\
<%! from generator.lib.util import (estr, enclose_in, hash_comment, library_to_crate_name, to_extern_crate_name) %>\
<%namespace name="util" file="../lib/util.mako"/>\
<%block filter="hash_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>

@@ -1,6 +1,6 @@
## -*- coding: utf-8 -*-
<%! import util %>\
<%namespace name="mutil" file="lib/util.mako"/>\
<%! import generator.lib.util as util %>\
<%namespace name="mutil" file="../lib/util.mako"/>\
<%block filter="util.markdown_comment">\
<%mutil:gen_info source="${self.uri}" />\
</%block>

@@ -1,13 +1,13 @@
<%
from util import (markdown_comment, new_context)
c = new_context(schemas, resources, context.get('methods'))
from generator.lib.util import (markdown_comment, new_context)
c = new_context(schemas, resources)
%>\
<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../lib/util.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%block filter="markdown_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>
The `${util.crate_name()}` library allows access to all features of the *Google ${util.canonical_name()}* service.

${lib.docs(c, rust_doc=False)}
<%lib:license />
<%lib:license />

@@ -1,15 +1,15 @@
<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../lib/util.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="rbuild" file="lib/rbuild.mako"/>\
<%namespace name="mbuild" file="lib/mbuild.mako"/>\
<%namespace name="schema" file="lib/schema.mako"/>\
<%
from util import (new_context, rust_comment, rust_doc_comment, rust_module_doc_comment,
from generator.lib.util import (new_context, rust_comment, rust_doc_comment, rust_module_doc_comment,
rb_type, hub_type, mangle_ident, hub_type_params_s,
rb_type_params_s, find_fattest_resource, HUB_TYPE_PARAMETERS, METHODS_RESOURCE,
UNUSED_TYPE_MARKER, schema_markers)

c = new_context(schemas, resources, context.get('methods'))
c = new_context(schemas, resources)
hub_type = hub_type(c.schemas, util.canonical_name())
ht_params = hub_type_params_s()
@@ -1,9 +1,9 @@
<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../lib/util.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%
from util import (new_context, rust_comment, rust_module_doc_comment)
from generator.lib.util import (new_context, rust_comment, rust_module_doc_comment)

c = new_context(schemas, resources, context.get('methods'))
c = new_context(schemas, resources)
%>\
<%block filter="rust_comment">\
<%util:gen_info source="${self.uri}" />\

@@ -20,17 +20,17 @@ ${lib.docs(c)}
#![allow(unused_imports, unused_mut, dead_code)]

<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../lib/util.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="rbuild" file="lib/rbuild.mako"/>\
<%namespace name="mbuild" file="lib/mbuild.mako"/>\
<%namespace name="schema" file="lib/schema.mako"/>\
<%
from util import (new_context, rust_comment, rust_doc_comment, rust_module_doc_comment,
from generator.lib.util import (new_context, rust_comment, rust_doc_comment, rust_module_doc_comment,
rb_type, hub_type, mangle_ident, hub_type_params_s,
rb_type_params_s, find_fattest_resource, HUB_TYPE_PARAMETERS, METHODS_RESOURCE,
UNUSED_TYPE_MARKER, schema_markers)

c = new_context(schemas, resources, context.get('methods'))
c = new_context(schemas, resources)
hub_type = hub_type(c.schemas, util.canonical_name())
ht_params = hub_type_params_s()
@@ -1,5 +1,5 @@
<%!
from util import (activity_split, put_and, md_italic, split_camelcase_s, canonical_type_name, hub_type,
from generator.lib.util import (activity_split, put_and, md_italic, split_camelcase_s, canonical_type_name, hub_type,
rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment, markdown_rust_block,
unindent_first_by, mangle_ident, mb_type, singular, scope_url_to_variant,
PART_MARKER_TRAIT, RESOURCE_MARKER_TRAIT, CALL_BUILDER_MARKERT_TRAIT,

@@ -12,7 +12,7 @@
def pretty_name(name):
return ' '.join(split_camelcase_s(name).split('.'))
%>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%namespace name="mbuild" file="mbuild.mako"/>\

## If rust-doc is True, examples will be made to work for rust doc tests. Otherwise they are set

@@ -1,5 +1,5 @@
<%!
from util import (put_and, rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment,
from generator.lib.util import (put_and, rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment,
rb_type, mb_type, singular, hub_type, to_fqan, indent_all_but_first_by,
activity_rust_type, mangle_ident, activity_input_type, get_word,
split_camelcase_s, property, is_pod_property, TREF, IO_REQUEST,

@@ -28,7 +28,7 @@
part_desc = part_desc[:-1]
return part_desc
%>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%namespace name="lib" file="lib.mako"/>\

## Creates a method builder type

@@ -1,5 +1,5 @@
<%!
from util import (put_and, rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment,
from generator.lib.util import (put_and, rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment,
rb_type, singular, hub_type, mangle_ident, mb_type, property,
to_fqan, indent_all_but_first_by, is_repeated_property, is_required_property,
activity_input_type, TREF, IO_REQUEST, schema_to_required_property,

@@ -8,7 +8,7 @@
struct_type_bounds_s, METHODS_RESOURCE, SPACES_PER_TAB, prefix_all_but_first_with,
METHODS_BUILDER_MARKER_TRAIT, remove_empty_lines, method_default_scope, rust_doc_sanitize)
%>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%namespace name="lib" file="lib.mako"/>\

## Creates a Resource builder type

@@ -1,5 +1,5 @@
<%!
from util import (schema_markers, rust_doc_comment, mangle_ident, to_rust_type, put_and,
from generator.lib.util import (schema_markers, rust_doc_comment, mangle_ident, to_rust_type, put_and,
IO_TYPES, activity_split, enclose_in, REQUEST_MARKER_TRAIT, mb_type, indent_all_but_first_by,
NESTED_TYPE_SUFFIX, RESPONSE_MARKER_TRAIT, split_camelcase_s, METHODS_RESOURCE,
PART_MARKER_TRAIT, canonical_type_name, TO_PARTS_MARKER, UNUSED_TYPE_MARKER, is_schema_with_optionals,
@@ -1,10 +1,10 @@
<%
from util import (markdown_comment, new_context)
from cli import (CONFIG_DIR, CONFIG_DIR_FLAG, SCOPE_FLAG, application_secret_path, DEBUG_FLAG)
from generator.lib.util import (markdown_comment, new_context)
from generator.lib.cli import (CONFIG_DIR, CONFIG_DIR_FLAG, SCOPE_FLAG, application_secret_path, DEBUG_FLAG)

c = new_context(schemas, resources, context.get('methods'))
c = new_context(schemas, resources)
%>\
<%namespace name="util" file="../lib/util.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="argparse" file="lib/argparse.mako"/>\
<%block filter="markdown_comment">\
<%util:gen_info source="${self.uri}" />\

@@ -96,4 +96,4 @@ You may consider redirecting standard error into a file for ease of use, e.g. `$
[scopes]: https://developers.google.com/+/api/oauth#scopes
[revoke-access]: http://webapps.stackexchange.com/a/30849
[google-dev-console]: https://console.developers.google.com/
[google-project-new]: https://developers.google.com/console/help/new/
[google-project-new]: https://developers.google.com/console/help/new/

@@ -1,8 +1,8 @@
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%!
from mako.filters import xml_escape
from util import (hash_comment, new_context, method_default_scope, indent_all_but_first_by, is_repeated_property, custom_sorted)
from cli import (subcommand_md_filename, new_method_context, SPLIT_START, SPLIT_END, pretty, SCOPE_FLAG,
from generator.lib.util import (hash_comment, new_context, method_default_scope, indent_all_but_first_by, is_repeated_property, custom_sorted)
from generator.lib.cli import (subcommand_md_filename, new_method_context, SPLIT_START, SPLIT_END, pretty, SCOPE_FLAG,
mangle_subcommand, is_request_value_property, FIELD_SEP, PARAM_FLAG, UPLOAD_FLAG, docopt_mode,
FILE_ARG, MIME_ARG, OUT_ARG, OUTPUT_FLAG, to_cli_schema, cli_schema_to_yaml, SchemaEntry,
STRUCT_FLAG, field_to_value, CTYPE_ARRAY, CTYPE_MAP, to_docopt_arg, FILE_FLAG, MIME_FLAG,

@@ -15,7 +15,7 @@
NO_DESC = 'No description provided.'
%>\
<%
c = new_context(schemas, resources, context.get('methods'))
c = new_context(schemas, resources)
%>\
% for resource in sorted(c.rta_map.keys()):
% for method in sorted(c.rta_map[resource]):

@@ -220,4 +220,4 @@ ${self._list_schem_args(f, cursor_tokens, first_flag)}
%>\
% endif
% endfor
</%def>
</%def>
@@ -1,9 +1,9 @@
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%!
import os

from util import (put_and, supports_scopes, api_index, indent_by, enclose_in, put_and, escape_rust_string)
from cli import (mangle_subcommand, new_method_context, PARAM_FLAG, STRUCT_FLAG, UPLOAD_FLAG, OUTPUT_FLAG, VALUE_ARG,
from generator.lib.util import (put_and, supports_scopes, api_index, indent_by, enclose_in, put_and, escape_rust_string)
from generator.lib.cli import (mangle_subcommand, new_method_context, PARAM_FLAG, STRUCT_FLAG, UPLOAD_FLAG, OUTPUT_FLAG, VALUE_ARG,
CONFIG_DIR, SCOPE_FLAG, is_request_value_property, FIELD_SEP, docopt_mode, FILE_ARG, MIME_ARG, OUT_ARG,
CONFIG_DIR_FLAG, KEY_VALUE_ARG, to_docopt_arg, DEBUG_FLAG, MODE_ARG, SCOPE_ARG,
CONFIG_DIR_ARG, FILE_FLAG, MIME_FLAG, subcommand_md_filename)

@@ -1,9 +1,9 @@
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%!
from util import (hub_type, mangle_ident, indent_all_but_first_by, activity_rust_type, setter_fn_name, ADD_PARAM_FN,
from generator.lib.util import (hub_type, mangle_ident, indent_all_but_first_by, activity_rust_type, setter_fn_name, ADD_PARAM_FN,
upload_action_fn, is_schema_with_optionals, schema_markers, indent_by, method_default_scope,
ADD_SCOPE_FN, TREF, enclose_in)
from cli import (mangle_subcommand, new_method_context, PARAM_FLAG, STRUCT_FLAG, OUTPUT_FLAG, VALUE_ARG,
from generator.lib.cli import (mangle_subcommand, new_method_context, PARAM_FLAG, STRUCT_FLAG, OUTPUT_FLAG, VALUE_ARG,
CONFIG_DIR, SCOPE_FLAG, is_request_value_property, FIELD_SEP, docopt_mode, FILE_ARG, MIME_ARG, OUT_ARG,
call_method_ident, POD_TYPES, opt_value, ident, JSON_TYPE_VALUE_MAP,
KEY_VALUE_ARG, to_cli_schema, SchemaEntry, CTYPE_POD, actual_json_type, CTYPE_MAP, CTYPE_ARRAY,
@@ -1,12 +1,12 @@
<%namespace name="argparse" file="lib/argparse.mako"/>\
<%namespace name="engine" file="lib/engine.mako"/>\
<%namespace name="util" file="../lib/util.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%
from util import (new_context, rust_comment, to_extern_crate_name, library_to_crate_name, library_name,
from generator.lib.util import (new_context, rust_comment, to_extern_crate_name, library_to_crate_name, library_name,
indent_all_but_first_by)
from cli import OUT_ARG, DEBUG_FLAG, opt_value
from generator.lib.cli import OUT_ARG, DEBUG_FLAG, opt_value

c = new_context(schemas, resources, context.get('methods'))
c = new_context(schemas, resources)
default_user_agent = "google-cli-rust-client/" + cargo.build_version
%>\
<%block filter="rust_comment">\

@@ -1,10 +1,10 @@
<%
from util import (put_and, new_context)
from cli import (subcommand_md_filename, mangle_subcommand, pretty)
from generator.lib.util import (put_and, new_context)
from generator.lib.cli import (subcommand_md_filename, mangle_subcommand, pretty)

c = new_context(schemas, resources, context.get('methods'))
c = new_context(schemas, resources)
%>\
<%namespace name="util" file="../lib/util.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
site_name: ${util.canonical_name()} v${util.crate_version()}
site_url: ${cargo.doc_base_url}/${util.crate_name()}
site_description: A complete library to interact with ${util.canonical_name()} (protocol ${version})
@@ -58,7 +58,7 @@
<% continue %>\
% endif
<%
import util
import generator.lib.util as util
import os
import json

@@ -191,7 +191,7 @@ help${agsuffix}:

% for info in (apis.get('items') or []):
<%
import util
import generator.lib.util as util
import os
name = util.normalize_library_name(info['name'])
target = util.api_json_path(directories.api_base, name, info['version'])

@@ -2,7 +2,7 @@
import json
import os
import yaml
from util import (api_json_path, library_name, library_to_crate_name,
from generator.lib.util import (api_json_path, library_name, library_to_crate_name,
gen_crate_dir, api_index, crates_io_url, program_name,
crate_version)