Restructure src dir

Make a few changes in the toplevel src dir to help separate
templates from code. Specifically, we rename `src/mako` to
`src/generator`, and nest the mako templates inside a
`src/generator/templates` dir.

This isolates most Python code into the `src/generator/lib` dir.
This commit is contained in:
Kyle Gentle
2022-08-06 16:08:06 -04:00
parent 5e1c0c857e
commit 08552c4364
30 changed files with 65 additions and 59 deletions

View File

View File

263
src/generator/lib/cli.py Normal file
View File

@@ -0,0 +1,263 @@
import generator.lib.util as util
import os
import re
import collections
from copy import deepcopy
from random import (randint, random, choice)
# Markers used by templates to delimit per-file output segments
# (consumed by process_template_result below).
SPLIT_START = '>>>>>>>'
SPLIT_END = '<<<<<<<'

# Flag / argument names used when generating the CLI's docopt interface.
PARAM_FLAG = 'p'
STRUCT_FLAG = 'r'
UPLOAD_FLAG = 'u'
OUTPUT_FLAG = 'o'
VALUE_ARG = 'v'
KEY_VALUE_ARG = 'kv'
SCOPE_FLAG = 'scope'
CONFIG_DIR_FLAG = 'config-dir'
DEBUG_FLAG = 'debug'
# Fallback mime type used for uploads when none is given.
DEFAULT_MIME = 'application/octet-stream'
MODE_ARG = 'mode'
FILE_ARG = 'file'
FILE_FLAG = 'f'
MIME_ARG = 'mime'
MIME_FLAG = 'm'
OUT_ARG = 'out'
SCOPE_ARG = 'url'
CONFIG_DIR_ARG = 'folder'
# Separator between nested field names on the command line.
FIELD_SEP = '.'
# Default directory for per-program configuration and tokens.
CONFIG_DIR = '~/.google-service-cli'
# JSON-schema types that map to plain-old-data values.
POD_TYPES = set(('boolean', 'integer', 'number', 'uint32', 'double', 'float', 'int32', 'int64', 'uint64', 'string'))

# Matches one output segment: "SPLIT_START <filename>\n<contents>\nSPLIT_END".
re_splitters = re.compile(r"%s ([\w\-\.]+)\n(.*?)\n%s" % (SPLIT_START, SPLIT_END), re.MULTILINE|re.DOTALL)

# Bundles everything a template needs to render one API method call.
MethodContext = collections.namedtuple('MethodContext', ['m', 'response_schema', 'params', 'request_value',
                                                         'media_params' ,'required_props', 'optional_props',
                                                         'part_prop'])

# Container kinds a schema field can have.
CTYPE_POD = 'pod'
CTYPE_ARRAY = 'list'
CTYPE_MAP = 'map'

# container_type: one of the CTYPE_* constants above
# actual_property: the POD-typed property (array item / map value / the field itself)
# property: the (possibly adjusted) original property
SchemaEntry = collections.namedtuple('SchemaEntry', ['container_type', 'actual_property', 'property'])

# Random example-value generators keyed by JSON type (used for doc examples).
JSON_TYPE_RND_MAP = {'boolean': lambda: str(bool(randint(0, 1))).lower(),
                     'integer' : lambda: randint(0, 100),
                     'uint32' : lambda: randint(0, 100),
                     'uint64' : lambda: randint(0, 65556),
                     'float' : lambda: random(),
                     'double' : lambda: random(),
                     'number' : lambda: random(),
                     'int32' : lambda: randint(-101, -1),
                     'int64' : lambda: randint(-101, -1),
                     'string': lambda: '%s' % choice(util.words).lower()}

# JSON type -> name of the docopt/clap value-type enum variant.
JSON_TYPE_TO_ENUM_MAP = {'boolean' : 'Boolean',
                         'integer' : 'Int',
                         'number' : 'Float',
                         'uint32' : 'Int',
                         'double' : 'Float',
                         'float' : 'Float',
                         'int32' : 'Int',
                         'any' : 'String', # TODO: Figure out how to handle it. It's 'interface' in Go ...
                         'int64' : 'Int',
                         'uint64' : 'Uint',
                         'string' : 'String'}

# Container kind -> name of the container enum variant.
CTYPE_TO_ENUM_MAP = {CTYPE_POD: 'Pod',
                     CTYPE_ARRAY: 'Vec',
                     CTYPE_MAP: 'Map'}

# JSON type -> default/zero value literal.
JSON_TYPE_VALUE_MAP = {'boolean': 'false',
                       'integer' : '-0',
                       'uint32' : '0',
                       'uint64' : '0',
                       'float' : '0.0',
                       'double' : '0.0',
                       'number' : '0.0',
                       'int32' : '-0',
                       'int64' : '-0',
                       'string': ''}

# Sanity check: every POD type must have a random-value generator and vice versa.
assert len(set(JSON_TYPE_RND_MAP.keys()) ^ POD_TYPES) == 0
def new_method_context(resource, method, c):
    """Assemble a MethodContext for the given resource/method pair.

    `c` is the generation context; the returned namedtuple bundles the
    method description, its response schema and its organized parameters.
    """
    method_desc = c.fqan_map[util.to_fqan(c.rtc_map[resource], resource, method)]
    response_schema = util.method_response(c, method_desc)
    all_params, request_value = util.build_all_params(c, method_desc)
    # let CLIs not support resumable downloads or uploads for now, but don't affect the APIs
    simple_media = [mp for mp in util.method_media_params(method_desc) if mp.protocol == "simple"]
    required, optional, part = util.organize_params(all_params, request_value)
    return MethodContext(method_desc, response_schema, all_params, request_value,
                         simple_media, required, optional, part)
def comma_sep_fields(fields):
    """Comma-separated list of quoted, mangled field names in sorted order."""
    quoted = ['"%s"' % mangle_subcommand(field) for field in sorted(fields)]
    return ', '.join(quoted)
# Returns a string representing a string-vector of mangled names
# fields is an iterator
def field_vec(fields):
    """Render `fields` as a Rust `vec![...]` literal of quoted names."""
    return "vec![%s]" % comma_sep_fields(fields)
def pretty(n):
    """Human-readable title: mangle, split on dashes, capitalize each word."""
    words = mangle_subcommand(n).split('-')
    return ' '.join(word.capitalize() for word in words)
def is_request_value_property(mc, p):
    """Whether property `p` references the method's request-value schema.

    Returns the falsy `mc.request_value` itself when no request value exists
    (preserving the original short-circuit semantics).
    """
    if not mc.request_value:
        return mc.request_value
    return mc.request_value.id == p.get(util.TREF)
# transform name to be a suitable subcommand
def mangle_subcommand(name):
    """Turn a camelCase API name into a dash-separated subcommand name."""
    underscored = util.camel_to_under(name)
    return underscored.replace('_', '-').replace('.', '-')
def ident(name):
    """A valid Rust identifier derived from the mangled subcommand name."""
    dashed = mangle_subcommand(name)
    return dashed.replace('-', '_')
# Return a required value in Rust, using unwrap()
def req_value(name):
    """Rust expression fetching a required clap value (panics if absent)."""
    return 'opt.value_of("%s").unwrap()' % mangle_subcommand(name)
def opt_value(name, opt='opt', default=''):
    """Rust expression fetching an optional clap value with a default."""
    return '%s.value_of("%s").unwrap_or("%s")' % (opt, mangle_subcommand(name), default)
def opt_values(name, opt='opt'):
    """Rust expression collecting all values of a repeated clap argument."""
    accessor = '%s.values_of("%s")' % (opt, mangle_subcommand(name))
    return accessor + '.map(|i|i.collect()).unwrap_or(Vec::new()).iter()'
def application_secret_path(program_name):
    """Filename holding the OAuth application secret for `program_name`."""
    return '%s-secret.json' % program_name
# Returns identifier for method dealing with options for the given resource-method pair
def call_method_ident(resource, method):
    """Private Rust function identifier for a resource/method invocation."""
    return '_{}_{}'.format(ident(resource), ident(method))
# transform the resource name into a suitable filename to contain the markdown documentation for it
def subcommand_md_filename(resource, method):
    """Markdown documentation filename for a resource/method subcommand."""
    parts = (mangle_subcommand(resource), mangle_subcommand(method))
    return '%s_%s.md' % parts
def docopt_mode(protocols):
    """Join protocol names with '|', parenthesized when there are several."""
    joined = '|'.join(protocols)
    return '(%s)' % joined if len(protocols) > 1 else joined
# Returns a possibly remapped type, based on its name.
# Useful to map strings to more suitable types, i.e. counts
def actual_json_type(name, type):
    """Hook for remapping a JSON type by field name; currently the identity.

    NOTE(review): the string-named-*Count* -> int64 remapping happens in
    to_cli_schema's dup_property instead; this hook is unused logic for now.
    """
    return type
# return a string representing property `p` suitable for docopt argument parsing
def to_docopt_arg(p):
    """docopt placeholder for property `p`, with '...' appended when repeated."""
    suffix = '...' if p.get('repeated', False) else ''
    return '<%s>%s' % (mangle_subcommand(p.name), suffix)
# Return schema' with fields dict: { 'field1' : SchemaField(...), 'SubSchema': schema' }
def to_cli_schema(c, schema):
    """Build a CLI-oriented copy of `schema` with a 'fields' dict.

    Each entry is either a SchemaEntry (for POD fields, POD arrays and POD
    maps) or a nested cli-schema (for object/reference fields). Fields whose
    nested schema has no usable fields are dropped.
    """
    res = deepcopy(schema)
    fd = dict()
    res['fields'] = fd
    # util.nested_type_name
    properties = schema.get('properties', dict())
    # Enumeration-style variant schemas carry their members in 'variant.map';
    # treat each variant member as a property keyed by its type_value.
    if not properties and 'variant' in schema and 'map' in schema.variant:
        for e in schema.variant.map:
            assert util.TREF in e
            properties[e.type_value] = e
    # end handle enumerations
    for pn, p in util.items(properties):
        # NOTE: these closures capture the current loop's `pn`/`p` and are
        # called immediately below, so late binding is not an issue here.
        def set_nested_schema(ns):
            # only record nested schemas that contributed at least one field
            if ns.fields:
                fd[pn] = ns
        # end utility
        def dup_property():
            pc = deepcopy(p)
            # heuristic: string fields named *Count* are really 64-bit ints
            if 'type' in pc and pc.type == 'string' and 'Count' in pn:
                pc.type = 'int64'
            return pc
        # end
        if util.TREF in p:
            if p[util.TREF] != schema.id: # prevent recursion (in case of self-referential schemas)
                set_nested_schema(to_cli_schema(c, c.schemas[p[util.TREF]]))
        elif p.type == 'array' and 'items' in p and 'type' in p.get('items') and p.get('items').type in POD_TYPES:
            pc = dup_property()
            fd[pn] = SchemaEntry(CTYPE_ARRAY, pc.get('items'), pc)
        elif p.type == 'object':
            if util.is_map_prop(p):
                # only maps with POD values are representable on the CLI
                if 'type' in p.additionalProperties and p.additionalProperties.type in POD_TYPES:
                    pc = dup_property()
                    fd[pn] = SchemaEntry(CTYPE_MAP, pc.additionalProperties, pc)
            else:
                set_nested_schema(to_cli_schema(c, c.schemas[util.nested_type_name(schema.id, pn)]))
        elif p.type in POD_TYPES:
            pc = dup_property()
            fd[pn] = SchemaEntry(CTYPE_POD, pc, pc)
        # end handle property type
    # end
    return res
# Convert the given cli-schema (result from to_cli_schema(schema)) to a yaml-like string. It's suitable for
# documentation only
def cli_schema_to_yaml(schema, prefix=''):
    """Render a cli-schema as indented, yaml-like documentation text.

    The top-level call (empty `prefix`) emits a "SchemaId:" header; nested
    calls only emit their indented fields.
    """
    out = '' if prefix else '%s%s:\n' % (prefix, schema.id)
    prefix += ' '
    for field_name in sorted(schema.fields.keys()):
        entry = schema.fields[field_name]
        out += '%s%s:' % (prefix, mangle_subcommand(field_name))
        if isinstance(entry, SchemaEntry):
            type_name = entry.actual_property.type
            if entry.container_type == CTYPE_ARRAY:
                type_name = '[%s]' % type_name
            elif entry.container_type == CTYPE_MAP:
                type_name = '{ string: %s }' % type_name
            out += ' %s\n' % type_name
        else:
            # nested cli-schema: recurse with the deeper indent
            out += '\n' + cli_schema_to_yaml(entry, prefix)
    # end for each field
    return out
# Return a value string suitable for the given field.
def field_to_value(f):
    """Random example value for field `f`; map entries get a 'key=' prefix."""
    value = JSON_TYPE_RND_MAP[f.actual_property.type]()
    return 'key=%s' % value if f.container_type == CTYPE_MAP else value
# split the result along split segments
def process_template_result(r, output_file: str):
    """Split rendered template output `r` into separate files.

    A template may embed several output files, each delimited by
    `SPLIT_START <filename> ... SPLIT_END` (see `re_splitters`). Every such
    segment is written UTF-8-encoded into the directory of `output_file`.

    Returns None when at least one segment was written (the caller must not
    write `output_file` itself), otherwise returns `r` unchanged.

    Raises RuntimeError when segments are present but no output directory
    could be derived (i.e. `output_file` was falsy).
    """
    found = False
    out_dir = None  # don't shadow the builtin `dir`
    if output_file:
        out_dir = os.path.dirname(output_file)
        # exist_ok avoids the race between a separate isdir() check and
        # makedirs(); an empty dirname means "current directory", for which
        # makedirs('') would raise, so skip creation in that case.
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
    # end handle output directory
    for m in re_splitters.finditer(r):
        if out_dir is None:
            raise RuntimeError("Missing directory; was output_file specified?")
        found = True
        # `with` guarantees the handle is closed even if write() raises
        with open(os.path.join(out_dir, m.group(1)), 'wb') as fh:
            fh.write(m.group(2).encode('UTF-8'))
    # end for each match
    return None if found else r

View File

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,61 @@
<%! import generator.lib.util as util %>\
## Shared helper defs used by all generator templates (naming, versions, URLs).
## source should be ${self.uri}
## you need to escape the output, using a filter for example
## Renders a DO-NOT-EDIT banner naming the template `source` it came from.
<%def name="gen_info(source)">\
DO NOT EDIT !
This file was generated automatically from '${source}'
DO NOT EDIT !\
</%def>
## This will only work within a substitution, not within python code
## Strips the leading 'v' from a discovery version string like 'v1beta2'.
<%def name="to_api_version(v)" buffered="True">\
<% assert len(v) >= 2 and v[0] == 'v'%>\
## convert it once to int, just to be sure it is an int
${v[1:]}\
</%def>
## URL of this API's generated source directory in the repository.
<%def name="github_source_root_url()" buffered="True">\
${cargo.repo_base_url}/tree/main/${directories.output}/${util.target_directory_name(name, version, make.target_suffix)}\
</%def>
## Library name derived from API name and version, e.g. 'oauth2_v1'.
<%def name="library_name()" buffered="True">\
${util.library_name(name, version)}\
</%def>
## Crate name including the make-target suffix (e.g. a '-cli' variant).
<%def name="crate_name()" buffered="True">\
${util.library_to_crate_name(util.library_name(name, version), make.target_suffix)}\
</%def>
## Base documentation URL: CLI crates are self-hosted, libraries use docs.rs.
<%def name="doc_base_url()" buffered="True">\
% if make.id == 'cli':
${cargo.doc_base_url + '/' + util.to_extern_crate_name(self.crate_name())}\
% else:
${util.docs_rs_url(cargo.doc_base_url, self.crate_name(), self.crate_version())}\
% endif
</%def>
## Full crate version derived from the build version and the API revision.
<%def name="crate_version()" buffered="True">\
${util.crate_version(cargo.build_version, revision)}\
</%def>
## All crates and standard `use` declaration, required for all examples
## Must be outside of a test function
<%def name="test_prelude()">\
extern crate hyper;
extern crate hyper_rustls;
extern crate ${util.to_extern_crate_name(self.crate_name())} as ${self.library_name()};
</%def>
## Define the canonical name, if present, or name otherwise
<%def name="canonical_name()" buffered="True">\
% if canonicalName is UNDEFINED:
${name}\
% else:
${canonicalName}\
% endif
</%def>
## Binary name of the generated CLI program.
<%def name="program_name()" buffered="True">\
${util.program_name(name, version)}\
</%def>

1051
src/generator/lib/util.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,123 @@
#!/usr/bin/env python
import importlib.resources
import unittest
import json
from generator.lib.util import to_api_version, library_name, re_find_replacements, to_rust_type, new_context
import generator.lib.test_data as test_data
# Discovery-document fixture bundled with the test data package.
TEST_JSON_FILE = "photoslibrary-api.json"


def read_test_json_file():
    """Parse the bundled discovery-document fixture into a dict."""
    raw = importlib.resources.read_text(test_data, TEST_JSON_FILE)
    return json.loads(raw)
class UtilsTest(unittest.TestCase):
    """Tests for the version/name/regex/type helpers in generator.lib.util."""

    def test_to_version_ok(self):
        # Discovery version strings map to crate-name-safe suffixes:
        # '.' becomes 'd', feature tags become '_tag', prefixes move to the end.
        for v, want in (('v1.3', '1d3'),
                        ('v1', '1'),
                        ('directory_v1', '1_directory'),
                        ('directory_v1.3', '1d3_directory'),
                        ('v1beta2', '1_beta2'),
                        ('v1sandbox', '1_sandbox'),
                        ('v2.0', '2'),
                        ('v2.0.1', '2d0d1'),
                        ('v0.0', '0'),
                        ('v0.1.0', '0d1'),
                        ('v2.0beta3', '2_beta3'),
                        ('alpha', 'alpha'),
                        ('beta', 'beta'),
                        ('vm_beta', 'vm_beta')):
            res = to_api_version(v)
            self.assertEqual(res, want)

    def test_to_version_fail(self):
        # Strings without a recognizable version marker are rejected via assert.
        for iv in ('some_branch_name', '1.3'):
            with self.assertRaises(AssertionError):
                to_api_version(iv)

    def test_library_name(self):
        # Library name is '<api>_<converted version>'.
        for v, want in (('v1', 'oauth2_v1'),
                        ('v1.4', 'oauth2_v1d4'),
                        ('alpha', 'oauth2_alpha'),
                        ('beta', 'oauth2_beta'),
                        ('vm_beta', 'oauth2_vm_beta')):
            res = library_name('oauth2', v)
            self.assertEqual(res, want)

    def test_url_substitution(self):
        url = "https://www.googleapis.com/resumable/upload/groups/v1/groups/{groupId}/{foo}/archive"
        ms = list(re_find_replacements.finditer(url))
        self.assertEqual(len(ms), 2)
        self.assertEqual(ms[0].group(0), '{groupId}')
        self.assertEqual(ms[1].group(0), '{foo}')
        # findall already returns a list; no need to wrap it in list() again.
        # NOTE(review): this relies on re_find_replacements having no capturing
        # groups, so findall yields full-match strings — confirm in util.py.
        url = "customer/{customerId}/orgunits{/orgUnitPath*}"
        ms = re_find_replacements.findall(url)
        self.assertEqual(len(ms), 2)
        self.assertEqual(ms[0], '{customerId}')
        self.assertEqual(ms[1], '{/orgUnitPath*}')
        url = "{+project}/subscriptions"
        ms = re_find_replacements.findall(url)
        self.assertEqual(len(ms), 1)
        self.assertEqual(ms[0], '{+project}')

    def test_to_rust_type(self):
        full_api_schema = read_test_json_file()
        schemas = full_api_schema['schemas']
        # A bare $ref is optional by default ...
        class_name = None
        property_name = None
        property_value = {'$ref': 'Album'}
        rust_type = to_rust_type(schemas, class_name, property_name, property_value, allow_optionals=True)
        self.assertEqual(rust_type, 'Option<Album>')
        # ... and unwrapped with allow_optionals=False (same inputs otherwise).
        rust_type = to_rust_type(schemas, class_name, property_name, property_value, allow_optionals=False)
        self.assertEqual(rust_type, 'Album')
        # One representative property per scalar/reference/array case.
        test_properties = (
            ('Album', 'title', 'String'), # string
            ('Status', 'code', 'i32'), # numeric
            ('Album', 'mediaItemsCount', 'String'), # numeric via "count" keyword
            ('Album', 'isWriteable', 'bool'), # boolean
            ('Album', 'shareInfo', 'ShareInfo'), # reference type
            ('SearchMediaItemsResponse', 'mediaItems', 'Vec<MediaItem>'), # array
        )
        for (class_name, property_name, expected) in test_properties:
            property_value = schemas[class_name]['properties'][property_name]
            rust_type = to_rust_type(schemas, class_name, property_name, property_value, allow_optionals=False)
            self.assertEqual(rust_type, expected)
        # items reference
        class_name = 'SearchMediaItemsResponse'
        property_name = 'mediaItems'
        property_value = schemas[class_name]['properties'][property_name]
        rust_type = to_rust_type(schemas, class_name, property_name, property_value, allow_optionals=True)
        self.assertEqual(rust_type, 'Option<Vec<MediaItem>>')
        # additionalProperties reference
        class_name = 'Status'
        property_name = 'details'
        property_value = schemas[class_name]['properties'][property_name]
        rust_type = to_rust_type(schemas, class_name, property_name, property_value, allow_optionals=True)
        self.assertEqual(rust_type, 'Option<Vec<HashMap<String, String>>>')
def main():
    """Entry point: discover and run all test cases in this module."""
    unittest.main()


if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,55 @@
## Template for the generated Cargo.toml manifest of each API/CLI crate.
<%! from generator.lib.util import (estr, enclose_in, hash_comment, library_to_crate_name, to_extern_crate_name) %>\
<%namespace name="util" file="../lib/util.mako"/>\
<%block filter="hash_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>
[package]
name = "${util.crate_name()}"
version = "${util.crate_version()}"
authors = [${",\n ".join('"%s"' % a for a in cargo.authors)}]
description = "A complete library to interact with ${util.canonical_name()} (protocol ${version})"
repository = "${util.github_source_root_url()}"
% if documentationLink is not UNDEFINED and documentationLink:
homepage = "${documentationLink}"
% endif
documentation = "${util.doc_base_url()}"
license = "${copyright.license_abbrev}"
keywords = ["${name[:20]}", ${", ".join(estr(cargo.keywords))}]
autobins = false
edition = "2018"
## Only crates marked executable (the CLI variants) produce a binary target.
% if cargo.get('is_executable', False):
[[bin]]
name = "${util.program_name()}"
path = "src/main.rs"
% endif
[dependencies]
hyper-rustls = "0.23.0"
## Must match the one hyper uses, otherwise there are duplicate similarly named `Mime` structs
mime = "^ 0.2.0"
serde = "^ 1.0"
serde_json = "^ 1.0"
serde_derive = "^ 1.0"
yup-oauth2 = "^ 7.0"
itertools = "^ 0.10"
% for dep in cargo.get('dependencies', list()):
${dep}
% endfor
## A suffixed make-target (e.g. the CLI) additionally depends on the base
## API crate, referenced by relative path at a matching version.
<%
    api_name = util.library_name()
    crate_name_we_depend_on = None
    if make.depends_on_suffix is not None:
        crate_name_we_depend_on = library_to_crate_name(api_name, suffix=make.depends_on_suffix)
%>\
% if make.depends_on_suffix is not None:
[dependencies.${crate_name_we_depend_on}]
path = "../${api_name}"
version = "${util.crate_version()}"
% endif

View File

@@ -0,0 +1,31 @@
## -*- coding: utf-8 -*-
## Template for the generated LICENSE file: renders the MIT license text with
## the crate's copyright years and authors substituted in.
<%! import generator.lib.util as util %>\
<%namespace name="mutil" file="../lib/util.mako"/>\
<%block filter="util.markdown_comment">\
<%mutil:gen_info source="${self.uri}" />\
</%block>
The MIT License (MIT)
=====================
Copyright © `${copyright.years}` ${util.put_and(["`%s`" % a for a in copyright.authors])}
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the “Software”), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,13 @@
## Template for the generated crate README: intro line, feature docs, license.
<%
    from generator.lib.util import (markdown_comment, new_context)
    # Generation context shared with the doc-rendering defs in lib/lib.mako.
    c = new_context(schemas, resources, context.get('methods'))
%>\
<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%block filter="markdown_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>
The `${util.crate_name()}` library allows access to all features of the *Google ${util.canonical_name()}* service.
${lib.docs(c, rust_doc=False)}
<%lib:license />

View File

@@ -0,0 +1,141 @@
<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="rbuild" file="lib/rbuild.mako"/>\
<%namespace name="mbuild" file="lib/mbuild.mako"/>\
<%namespace name="schema" file="lib/schema.mako"/>\
## Template for the generated api.rs: imports, the central hub type, all
## schema structs, and the per-resource method/call builders.
<%
    from generator.lib.util import (new_context, rust_comment, rust_doc_comment, rust_module_doc_comment,
                                    rb_type, hub_type, mangle_ident, hub_type_params_s,
                                    rb_type_params_s, find_fattest_resource, HUB_TYPE_PARAMETERS, METHODS_RESOURCE,
                                    UNUSED_TYPE_MARKER, schema_markers)
    c = new_context(schemas, resources, context.get('methods'))
    hub_type = hub_type(c.schemas, util.canonical_name())
    ht_params = hub_type_params_s()
    default_user_agent = "google-api-rust-client/" + cargo.build_version
%>\
use std::collections::HashMap;
use std::cell::RefCell;
use std::default::Default;
use std::collections::BTreeMap;
use std::error::Error as StdError;
use serde_json as json;
use std::io;
use std::fs;
use std::mem;
use std::thread::sleep;
use http::Uri;
use hyper::client::connect;
use tokio::io::{AsyncRead, AsyncWrite};
use tower_service;
use crate::client;
// ##############
// UTILITIES ###
// ############
${lib.scope_enum()}
// ########
// HUB ###
// ######
/// Central instance to access all ${hub_type} related resource activities
///
/// # Examples
///
/// Instantiate a new hub
///
<%block filter="rust_doc_comment">\
${lib.hub_usage_example(c)}\
</%block>
#[derive(Clone)]
pub struct ${hub_type}${ht_params} {
    pub client: hyper::Client<S, hyper::body::Body>,
    pub auth: oauth2::authenticator::Authenticator<S>,
    _user_agent: String,
    _base_url: String,
    _root_url: String,
}
impl<'a, ${', '.join(HUB_TYPE_PARAMETERS)}> client::Hub for ${hub_type}${ht_params} {}
impl<'a, ${', '.join(HUB_TYPE_PARAMETERS)}> ${hub_type}${ht_params} {
    pub fn new(client: hyper::Client<S, hyper::body::Body>, authenticator: oauth2::authenticator::Authenticator<S>) -> ${hub_type}${ht_params} {
        ${hub_type} {
            client,
            auth: authenticator,
            _user_agent: "${default_user_agent}".to_string(),
            _base_url: "${baseUrl}".to_string(),
            _root_url: "${rootUrl}".to_string(),
        }
    }
## One accessor method per resource, returning that resource's method builder.
% for resource in sorted(c.rta_map.keys()):
    pub fn ${mangle_ident(resource)}(&'a self) -> ${rb_type(resource)}${rb_type_params_s(resource, c)} {
        ${rb_type(resource)} { hub: &self }
    }
% endfor
    /// Set the user-agent header field to use in all requests to the server.
    /// It defaults to `${default_user_agent}`.
    ///
    /// Returns the previously set user-agent.
    pub fn user_agent(&mut self, agent_name: String) -> String {
        mem::replace(&mut self._user_agent, agent_name)
    }
    /// Set the base url to use in all requests to the server.
    /// It defaults to `${baseUrl}`.
    ///
    /// Returns the previously set base url.
    pub fn base_url(&mut self, new_base_url: String) -> String {
        mem::replace(&mut self._base_url, new_base_url)
    }
    /// Set the root url to use in all requests to the server.
    /// It defaults to `${rootUrl}`.
    ///
    /// Returns the previously set root url.
    pub fn root_url(&mut self, new_root_url: String) -> String {
        mem::replace(&mut self._root_url, new_root_url)
    }
}
% if c.schemas:
// ############
// SCHEMAS ###
// ##########
## Emit one struct per schema, skipping types marked as entirely unused.
% for s in c.schemas.values():
% if UNUSED_TYPE_MARKER not in schema_markers(s, c, transitive=True):
${schema.new(s, c)}
% endif
% endfor
% endif
// ###################
// MethodBuilders ###
// #################
% for resource in c.rta_map:
${rbuild.new(resource, c)}
% endfor
// ###################
// CallBuilders   ###
// #################
% for resource, methods in c.rta_map.items():
% for method in methods:
${mbuild.new(resource, method, c)}
% endfor ## method in methods
% endfor ## resource, methods

View File

@@ -0,0 +1,61 @@
<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
## Template for the generated lib.rs: crate docs, lint configuration,
## extern-crate declarations and the public module layout.
<%
    from generator.lib.util import (new_context, rust_comment, rust_module_doc_comment)
    c = new_context(schemas, resources, context.get('methods'))
%>\
<%block filter="rust_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>
<%block filter="rust_module_doc_comment">\
${lib.docs(c)}
</%block>
// Unused attributes happen thanks to defined, but unused structures
// We don't warn about this, as depending on the API, some data structures or facilities are never used.
// Instead of pre-determining this, we just disable the lint. It's manually tuned to not have any
// unused imports in fully featured APIs. Same with unused_mut ... .
#![allow(unused_imports, unused_mut, dead_code)]
## NOTE(review): the namespace declarations and setup block below duplicate
## the ones at the top of this template (and mirror api.rs's preamble);
## `new_context` is computed twice and several imports/namespaces are unused
## here. Looks like copy-paste — confirm whether this block can be trimmed.
<%namespace name="lib" file="lib/lib.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="rbuild" file="lib/rbuild.mako"/>\
<%namespace name="mbuild" file="lib/mbuild.mako"/>\
<%namespace name="schema" file="lib/schema.mako"/>\
<%
    from generator.lib.util import (new_context, rust_comment, rust_doc_comment, rust_module_doc_comment,
                                    rb_type, hub_type, mangle_ident, hub_type_params_s,
                                    rb_type_params_s, find_fattest_resource, HUB_TYPE_PARAMETERS, METHODS_RESOURCE,
                                    UNUSED_TYPE_MARKER, schema_markers)
    c = new_context(schemas, resources, context.get('methods'))
    hub_type = hub_type(c.schemas, util.canonical_name())
    ht_params = hub_type_params_s()
    default_user_agent = "google-api-rust-client/" + cargo.build_version
%>\
<%block filter="rust_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>
#[macro_use]
extern crate serde_derive;
// Re-export the hyper and hyper_rustls crate, they are required to build the hub
pub extern crate hyper;
pub extern crate hyper_rustls;
extern crate serde;
extern crate serde_json;
// Re-export the yup_oauth2 crate, that is required to call some methods of the hub and the client
pub extern crate yup_oauth2 as oauth2;
extern crate mime;
extern crate url;
pub mod api;
pub mod client;
// Re-export the hub type and some basic client structs
pub use api::${hub_type};
pub use client::{Result, Error, Delegate};

View File

@@ -0,0 +1,371 @@
<%!
from generator.lib.util import (activity_split, put_and, md_italic, split_camelcase_s, canonical_type_name, hub_type,
rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment, markdown_rust_block,
unindent_first_by, mangle_ident, mb_type, singular, scope_url_to_variant,
PART_MARKER_TRAIT, RESOURCE_MARKER_TRAIT, CALL_BUILDER_MARKERT_TRAIT,
find_fattest_resource, build_all_params, pass_through, parts_from_params,
REQUEST_MARKER_TRAIT, RESPONSE_MARKER_TRAIT, supports_scopes, to_api_version,
to_fqan, METHODS_RESOURCE, ADD_PARAM_MEDIA_EXAMPLE, PROTOCOL_TYPE_INFO, enclose_in,
upload_action_fn, METHODS_BUILDER_MARKER_TRAIT, DELEGATE_TYPE,
to_extern_crate_name, rust_doc_sanitize)
def pretty_name(name):
return ' '.join(split_camelcase_s(name).split('.'))
%>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%namespace name="mbuild" file="mbuild.mako"/>\
## If rust-doc is True, examples will be made to work for rust doc tests. Otherwise they are set
## for github markdown.
###############################################################################################
###############################################################################################
<%def name="docs(c, rust_doc=True)">\
<%
# fr == fattest resource, the fatter, the more important, right ?
fr = find_fattest_resource(c)
hub_url = hub_type(c.schemas, util.canonical_name())
call_builder_url = CALL_BUILDER_MARKERT_TRAIT
delegate_url = DELEGATE_TYPE
request_trait_url = REQUEST_MARKER_TRAIT
response_trait_url = RESPONSE_MARKER_TRAIT
part_trait_url = PART_MARKER_TRAIT
doc_base_url = util.doc_base_url() + '/' + to_extern_crate_name(util.crate_name()) + '/'
def link(name, url):
lf = '[%s](%s)'
if rust_doc:
return lf % (name, url)
for scheme in ('http', 'https'):
if url.startswith(scheme + '://'):
return lf % (name, url)
return lf % (name, doc_base_url + url)
api_version = to_api_version(version)
if api_version[0].isdigit():
api_version = 'v' + api_version
upload_methods, download_methods, subscription_methods = list(), list(), list()
for m in c.fqan_map.values():
for array, param in ((download_methods, 'supportsMediaDownload'),
(upload_methods, 'supportsMediaUpload'),
(subscription_methods, 'supportsSubscription')):
if m.get(param, False):
array.append(m)
# end for each method
header_methods = (('Upload', upload_methods), ('Download', download_methods), ('Subscription', subscription_methods))
%>\
This documentation was generated from *${util.canonical_name()}* crate version *${util.crate_version()}*, where *${revision is UNDEFINED and '00000000' or revision}* is the exact revision of the *${id}* schema built by the [mako](http://www.makotemplates.org/) code generator *v${cargo.build_version}*.
% if documentationLink:
Everything else about the *${util.canonical_name()}* *${api_version}* API can be found at the
[official documentation site](${documentationLink}).
% endif
% if rust_doc:
The original source code is [on github](${util.github_source_root_url()}).
% endif
# Features
% if len(c.rta_map) > 0 + (METHODS_RESOURCE in c.rta_map):
Handle the following *Resources* with ease from the central ${link('hub', hub_url)} ...
% elif METHODS_RESOURCE in c.rta_map:
Use the following functionality with ease from the central ${link('hub', hub_url)} ...
% else:
It seems there is nothing you can do here ... .
% endif
% for r in sorted(c.rta_map.keys()):
% if r == METHODS_RESOURCE:
<% continue %>
% endif ## skip method resource
<%
md_methods = list()
for method in sorted(c.rta_map[r]):
md_methods.append(link('*%s*' % pretty_name(method), "api::%s" % mb_type(r, method)))
md_resource = pretty_name(r)
sn = singular(canonical_type_name(r))
if sn in schemas:
md_resource = link(md_resource, "api::%s" % sn)
%>\
* ${md_resource}
* ${put_and(md_methods)}
% endfor ## each resource activity
% if METHODS_RESOURCE in c.rta_map:
% if len(c.rta_map) > 1:
Other activities are ...
% endif
% for method in sorted(c.rta_map[METHODS_RESOURCE]):
* ${link(pretty_name(method), "api::%s" % mb_type(METHODS_RESOURCE, method))}
% endfor
% endif
% for method_type, methods in header_methods:
% if methods:
${method_type} supported by ...
% for m in methods:
<%
_, resource, method = activity_split(m.id)
name_parts = [pretty_name(method)]
if resource != METHODS_RESOURCE:
name_parts.append(pretty_name(resource))
%>\
* ${link('*%s*' % ' '.join(name_parts), "api::%s" % mb_type(resource, method))}
% endfor ## for each method
% endif ## if methods
% endfor ## for each method type
% if rust_doc:
Not what you are looking for ? Find all other Google APIs in their Rust [documentation index](http://byron.github.io/google-apis-rs).
% endif
# Structure of this Library
The API is structured into the following primary items:
* **${link('Hub', hub_url)}**
* a central object to maintain state and allow accessing all *Activities*
* creates ${link('*Method Builders*', METHODS_BUILDER_MARKER_TRAIT)} which in turn
allow access to individual ${link('*Call Builders*', call_builder_url)}
* **${link('Resources', RESOURCE_MARKER_TRAIT)}**
* primary types that you can apply *Activities* to
* a collection of properties and *Parts*
* **${link('Parts', part_trait_url)}**
* a collection of properties
* never directly used in *Activities*
* **${link('Activities', call_builder_url)}**
* operations to apply to *Resources*
All *structures* are marked with applicable traits to further categorize them and ease browsing.
Generally speaking, you can invoke *Activities* like this:
```Rust,ignore
let r = hub.resource().activity(...).${api.terms.action}().await
```
% if fr:
Or specifically ...
```ignore
% for an, a in c.sta_map[fr.id].items():
<% category, resource, activity = activity_split(an) %>\
let r = hub.${mangle_ident(resource)}().${mangle_ident(activity)}(...).${api.terms.action}().await
% endfor
```
% endif
The `resource()` and `activity(...)` calls create [builders][builder-pattern]. The second one dealing with `Activities`
supports various methods to configure the impending operation (not shown here). It is made such that all required arguments have to be
specified right away (i.e. `(...)`), whereas all optional ones can be [build up][builder-pattern] as desired.
The `${api.terms.action}()` method performs the actual communication with the server and returns the respective result.
# Usage
${'##'} Setting up your Project
To use this library, you would put the following lines into your `Cargo.toml` file:
```toml
[dependencies]
${util.crate_name()} = "*"
serde = "^1.0"
serde_json = "^1.0"
```
${'##'} A complete example
${self.hub_usage_example(c, rust_doc, fr=fr)}\
${'##'} Handling Errors
All errors produced by the system are provided either as ${link('Result', 'client::Result')} enumeration as return value of
the ${api.terms.action}() methods, or handed as possibly intermediate results to either the
${link('Hub Delegate', delegate_url)}, or the ${link('Authenticator Delegate', urls.authenticator_delegate)}.
When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This
makes the system potentially resilient to all kinds of errors.
${'##'} Uploads and Downloads
If a method supports downloads, the response body, which is part of the ${link('Result', 'client::Result')}, should be
read by you to obtain the media.
If such a method also supports a ${link('Response Result', 'client::ResponseResult')}, it will return that by default.
You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making
this call: `${ADD_PARAM_MEDIA_EXAMPLE}`.
Methods supporting uploads can do so using up to ${len(PROTOCOL_TYPE_INFO)} different protocols:
${put_and(md_italic(PROTOCOL_TYPE_INFO.keys()))}. The distinctiveness of each is represented by customized
`${api.terms.action}(...)` methods, which are then named ${put_and(enclose_in('`', ("%s(...)" % upload_action_fn(api.terms.upload_action, v['suffix']) for v in PROTOCOL_TYPE_INFO.values())))} respectively.
${'##'} Customization and Callbacks
You may alter the way an `${api.terms.action}()` method is called by providing a ${link('delegate', delegate_url)} to the
${link('Method Builder', call_builder_url)} before making the final `${api.terms.action}()` call.
Respective methods will be called to provide progress information, as well as determine whether the system should
retry on failure.
The ${link('delegate trait', delegate_url)} is default-implemented, allowing you to customize it with minimal effort.
${'##'} Optional Parts in Server-Requests
All structures provided by this library are made to be ${link('encodable', request_trait_url)} and
${link('decodable', response_trait_url)} via *json*. Optionals are used to indicate that partial requests are responses
are valid.
Most optionals are are considered ${link('Parts', part_trait_url)} which are identifiable by name, which will be sent to
the server to indicate either the set parts of the request or the desired parts in the response.
${'##'} Builder Arguments
Using ${link('method builders', call_builder_url)}, you are able to prepare an action call by repeatedly calling it's methods.
These will always take a single argument, for which the following statements are true.
* [PODs][wiki-pod] are handed by copy
* strings are passed as `&str`
* ${link('request values', request_trait_url)} are moved
Arguments will always be copied or cloned into the builder, to make them independent of their original life times.
[wiki-pod]: http://en.wikipedia.org/wiki/Plain_old_data_structure
[builder-pattern]: http://en.wikipedia.org/wiki/Builder_pattern
[google-go-api]: https://github.com/google/google-api-go-client
</%def>
## Sets up a hub ready for use. You must wrap it into a test function for it to work
## Needs test_prelude.
###############################################################################################
###############################################################################################
## Emits the standard hub-construction boilerplate used by doc-tests and
## examples: a default ApplicationSecret, an InstalledFlow authenticator, and
## the hub built on a rustls-backed hyper client.
## hub_type:  Rust type name of the hub to instantiate.
## comments:  when False, the explanatory Rust comments are suppressed
##            (useful for terse examples). Requires test_prelude to have been
##            rendered beforehand so the referenced crates are in scope.
<%def name="test_hub(hub_type, comments=True)">\
use std::default::Default;
use ${util.library_name()}::{${hub_type}, oauth2, hyper, hyper_rustls};
% if comments:
// Get an ApplicationSecret instance by some means. It contains the `client_id` and
// `client_secret`, among other things.
% endif
let secret: oauth2::ApplicationSecret = Default::default();
% if comments:
// Instantiate the authenticator. It will choose a suitable authentication flow for you,
// unless you replace `None` with the desired Flow.
// Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
// what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
// retrieve them from storage.
% endif
let auth = oauth2::InstalledFlowAuthenticator::builder(
secret,
oauth2::InstalledFlowReturnMethod::HTTPRedirect,
).build().await.unwrap();
let mut hub = ${hub_type}::new(hyper::Client::builder().build(hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_or_http().enable_http1().enable_http2().build()), auth);\
</%def>
## You will still have to set the filter for your comment type - either nothing, or rust_doc_comment !
###############################################################################################
###############################################################################################
## Renders a complete usage example for the API `c`, based on its 'fattest'
## resource: among that resource's methods we pick the one with the most
## parameters so the example exercises as much of the builder surface as
## possible.
## rust_doc: render as a rust doc-test (with invisible test scaffolding);
##           otherwise as a plain markdown rust block.
## fr:       optionally force the resource to document; auto-detected if None.
## Fix: build_all_params(c, am) was previously called twice per method with the
## first result discarded - the redundant call is removed.
<%def name="hub_usage_example(c, rust_doc=True, fr=None)">\
<%
    test_filter = rust_test_fn_invisible
    main_filter = rust_doc_test_norun
    if not rust_doc:
        test_filter = pass_through
        main_filter = markdown_rust_block
    if fr is None:
        fr = find_fattest_resource(c)
    if fr is not None:
        fqan = None
        last_param_count = None
        for fqan in c.sta_map[fr.id]:
            category, aresource, amethod = activity_split(fqan)
            # Cannot use fqan directly, as it might need remapping thanks to 'special case' resource.
            # see METHODS_RESOURCE for more information
            am = c.fqan_map[to_fqan(category, aresource, amethod)]
            aparams, arequest_value = build_all_params(c, am)
            # keep the method with the highest parameter count seen so far
            if last_param_count is None or len(aparams) > last_param_count:
                m, resource, method, params, request_value = am, aresource or category, amethod, aparams, arequest_value
                last_param_count = len(aparams)
        # end for each fn to test
        part_prop, parts = parts_from_params(params)
    # end fill in values
%>\
% if fr:
${mbuild.usage(resource, method, m, params, request_value, parts, show_all=True, rust_doc=rust_doc, handle_result=True)}\
% else:
<%block filter="main_filter">\
${util.test_prelude()}\
<%block filter="test_filter">\
${self.test_hub(hub_type(c.schemas, util.canonical_name()))}
</%block>
</%block>
% endif
</%def>
###############################################################################################
###############################################################################################
## Renders the License section of the generated README: credits the authors
## recorded in the copyright info and links to the repository's license file.
<%def name="license()">\
# License
The **${util.library_name()}** library was generated by ${put_and(copyright.authors)}, and is placed
under the *${copyright.license_abbrev}* license.
You can read the full text at the repository's [license file][repo-license].
[repo-license]: ${cargo.repo_base_url + 'blob/main/LICENSE.md'}
</%def>
## Builds the scope-enum for the API
## It's possible there is no scope enum if there is no auth information
###############################################################################################
###############################################################################################
## Builds the Scope enum for the API: one variant per OAuth2 scope URL, an
## AsRef<str> impl mapping variants back to their URLs, and a Default impl.
## Fix: corrected the generated doc-comment typo "Identifies the an OAuth2".
<%def name="scope_enum()">\
## Render nothing at all if the API declares no OAuth2 scopes.
% if not supports_scopes(auth):
<% return '' %>\
% endif
/// Identifies the OAuth2 authorization scope.
/// A scope is needed when requesting an
/// [authorization token](https://developers.google.com/youtube/v3/guides/authentication).
#[derive(PartialEq, Eq, Hash)]
pub enum Scope {
% for url, scope in auth.oauth2.scopes.items():
${scope.description | rust_doc_sanitize, rust_doc_comment}
${scope_url_to_variant(name, url, fully_qualified=False)},
% if not loop.last:

% endif
% endfor
}
impl AsRef<str> for Scope {
fn as_ref(&self) -> &str {
match *self {
% for url in auth.oauth2.scopes.keys():
${scope_url_to_variant(name, url)} => "${url}",
% endfor
}
}
}
impl Default for Scope {
fn default() -> Scope {
<%
    # Prefer a 'readonly' scope as the default (least privilege); otherwise
    # fall back to the shortest URL, which tends to be the broadest base scope.
    default_url = None
    shortest_url = None
    for url in auth.oauth2.scopes.keys():
        if not default_url and 'readonly' in url:
            default_url = url
        if not shortest_url or len(shortest_url) > len(url):
            shortest_url = url
    # end for each url
    default_url = default_url or shortest_url
%>\
${scope_url_to_variant(name, default_url)}
}
}
</%def>

View File

@@ -0,0 +1,927 @@
<%!
    from generator.lib.util import (put_and, rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment,
                                    rb_type, mb_type, singular, hub_type, to_fqan, indent_all_but_first_by,
                                    activity_rust_type, mangle_ident, activity_input_type, get_word,
                                    split_camelcase_s, property, is_pod_property, TREF, IO_REQUEST,
                                    schema_to_required_property, rust_copy_value_s, is_required_property,
                                    hide_rust_doc_test, build_all_params, REQUEST_VALUE_PROPERTY_NAME, organize_params,
                                    indent_by, to_rust_type, rnd_arg_val_for_type, extract_parts, mb_type_params_s,
                                    hub_type_params_s, method_media_params, enclose_in, method_response,
                                    CALL_BUILDER_MARKERT_TRAIT, pass_through, markdown_rust_block, parts_from_params,
                                    DELEGATE_PROPERTY_NAME, struct_type_bounds_s, scope_url_to_variant,
                                    re_find_replacements, ADD_PARAM_FN, ADD_PARAM_MEDIA_EXAMPLE, upload_action_fn, METHODS_RESOURCE,
                                    method_name_to_variant, size_to_bytes, method_default_scope,
                                    is_repeated_property, setter_fn_name, ADD_SCOPE_FN, rust_doc_sanitize, items)

    def get_parts(part_prop):
        """Return the list of settable *parts* encoded in the property's description text."""
        return extract_parts(part_prop.get('description', '')) if part_prop else list()

    def make_parts_desc(part_prop):
        """Render a markdown '**Settable Parts**' bullet list, or None if there are no parts."""
        part_names = get_parts(part_prop)
        if not part_names:
            return None
        bullets = '\n'.join('* *%s*' % part_name for part_name in part_names)
        return "**Settable Parts**\n\n" + bullets
%>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%namespace name="lib" file="lib.mako"/>\
## Creates a method builder type
###############################################################################################
###############################################################################################
## Creates a method builder type: renders the rustdoc for the builder, the
## struct holding the hub reference, one field per parameter plus the generic
## parameter map and scope set, and its impl with the action function(s),
## per-parameter setters and the generic param/scope adder functions.
## resource/method: names within API `c`; `c` carries the parsed API context.
<%def name="new(resource, method, c)">\
<%
    hub_type_name = hub_type(schemas,util.canonical_name())
    m = c.fqan_map[to_fqan(c.rtc_map[resource], resource, method)]
    response_schema = method_response(c, m)
    # an identifier for a property. We prefix them to prevent clashes with the setters
    mb_tparams = mb_type_params_s(m)
    ThisType = mb_type(resource, method) + mb_tparams
    params, request_value = build_all_params(c, m)
    # find the 'alt' parameter, if present - it drives the media-download hint below
    alt_param = None
    for p in params:
        if p.name == 'alt':
            alt_param = p
            break
        # end
    # end
    part_prop, parts = parts_from_params(params)
    part_desc = make_parts_desc(part_prop)
    parts = get_parts(part_prop)
%>\
% if 'description' in m:
${m.description | rust_doc_sanitize, rust_doc_comment}
///
% endif
% if m.get('supportsMediaDownload', False):
/// This method supports **media download**. To enable it, adjust the builder like this:
% if alt_param:
/// `.${mangle_ident(setter_fn_name(alt_param))}("media")`.
% else:
/// `${ADD_PARAM_MEDIA_EXAMPLE}`.
% endif
% if response_schema:
/// Please note that due to missing multi-part support on the server side, you will only receive the media,
/// but not the `${response_schema.id}` structure that you would usually get. The latter will be a default value.
% endif
///
% endif ## supports media download
% if resource == METHODS_RESOURCE:
/// A builder for the *${method}* method.
% else:
/// A builder for the *${method}* method supported by a *${singular(resource)}* resource.
% endif
/// It is not used directly, but through a `${rb_type(resource)}` instance.
///
## NOTE(review): the scopes documentation below only renders when part_desc is
## set - confirm this nesting is intended rather than a misplaced % endif.
% if part_desc:
${part_desc | rust_doc_sanitize, rust_doc_comment}
///
% if m.get('scopes'):
/// # Scopes
///
/// You will need authorization for \
% if len(m.scopes) > 1:
at least one of the following scopes to make a valid call, possibly depending on *parts*:
///
% for s in m.scopes:
/// * *${s}*
% endfor
% else:
the *${m.scopes[0]}* scope to make a valid call.
% endif # len(scopes) > 1
///
/// The default scope will be `${scope_url_to_variant(name, method_default_scope(m), fully_qualified=True)}`.
% endif # have scopes
///
% endif
/// # Example
///
/// Instantiate a resource method builder
///
<%block filter="rust_doc_comment">\
${self.usage(resource, method, m, params, request_value, parts)}\
</%block>
pub struct ${ThisType}
where ${struct_type_bounds_s()} {
hub: &'a ${hub_type_name}${hub_type_params_s()},
## PROPERTIES ###############
% for p in params:
${property(p.name)}:\
% if is_required_property(p):
${activity_rust_type(schemas, p, allow_optionals=False)},
% else:
${activity_rust_type(schemas, p)},
% endif
% endfor
## A generic map for additional parameters. Sometimes you can set some that are documented online only
${api.properties.params}: HashMap<String, String>,
% if method_default_scope(m):
## We need the scopes sorted, to not unnecessarily query new tokens
${api.properties.scopes}: BTreeMap<String, ()>
% endif
}
impl${mb_tparams} ${CALL_BUILDER_MARKERT_TRAIT} for ${ThisType} {}
impl${mb_tparams} ${ThisType}
where
S: tower_service::Service<Uri> + Clone + Send + Sync + 'static,
S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static,
S::Future: Send + Unpin + 'static,
S::Error: Into<Box<dyn StdError + Send + Sync>>,
{
## Some APIs additionally get a metadata-only (no upload) variant of the action.
% if api.get('no_upload_prefix') is not None and ThisType.startswith(api.no_upload_prefix):
${self._action_fn(c, resource, method, m, params, request_value, parts, doit_without_upload = True)}\
% endif
${self._action_fn(c, resource, method, m, params, request_value, parts)}\
## SETTERS ###############
% for p in params:
${self._setter_fn(resource, method, m, p, part_prop, ThisType, c)}\
% endfor
/// Set any additional parameter of the query string used in the request.
/// It should be used to set parameters which are not yet available through their own
/// setters.
///
/// Please note that this method must not be used to set any of the known parameters
/// which have their own setter method. If done anyway, the request will fail.
% if parameters:
///
/// # Additional Parameters
///
## Document only global parameters not shadowed by a method-specific setter.
% for opn, op in list((opn, op) for (opn, op) in parameters.items() if opn not in [p.name for p in params]):
/// * *${opn}* (${op.location}-${op.type}) - ${op.description}
% endfor
% endif
pub fn ${ADD_PARAM_FN}<T>(mut self, name: T, value: T) -> ${ThisType}
where T: AsRef<str> {
self.${api.properties.params}.insert(name.as_ref().to_string(), value.as_ref().to_string());
self
}
% if method_default_scope(m):
/// Identifies the authorization scope for the method you are building.
///
/// Use this method to actively specify which scope should be used, instead the default `Scope` variant
/// `${scope_url_to_variant(name, method_default_scope(m), fully_qualified=True)}`.
///
/// The `scope` will be added to a set of scopes. This is important as one can maintain access
/// tokens for more than one scope.
/// If `None` is specified, then all scopes will be removed and no default scope will be used either.
/// In that case, you have to specify your API-key using the `key` parameter (see the `${ADD_PARAM_FN}()`
/// function for details).
///
/// Usually there is more than one suitable scope to authorize an operation, some of which may
/// encompass more rights than others. For example, for listing resources, a *read-only* scope will be
/// sufficient, a read-write scope will do as well.
pub fn ${ADD_SCOPE_FN}<T, St>(mut self, scope: T) -> ${ThisType}
where T: Into<Option<St>>,
St: AsRef<str> {
match scope.into() {
Some(scope) => self.${api.properties.scopes}.insert(scope.as_ref().to_string(), ()),
None => None,
};
self
}
% endif
}
</%def>
## creates a setter for the call builder
###############################################################################################
###############################################################################################
## Creates one setter function on the call builder for parameter `p`:
## required params are set by value, optional params are wrapped in Some(..),
## and repeated params get an appending setter taking a single element.
<%def name="_setter_fn(resource, method, m, p, part_prop, ThisType, c)">\
<%
    # Input type of the setter. For repeated properties we want the type of a
    # single element, so temporarily clear the 'repeated' flag to compute it.
    InType = activity_input_type(schemas, p)
    if is_repeated_property(p):
        p.repeated = False
        InType = activity_input_type(schemas, p)
        p.repeated = True

    def show_part_info(m, p):
        # Extended docs apply only to the 'part' parameter of methods whose
        # request and response reference the same schema.
        if p.name != 'part':
            return False
        if not (m.get('request') and m.get('response')):
            return False
        # distinct defaults ('first'/'second') make this False when TREF is absent
        return m.request.get(TREF, 'first') == m.response.get(TREF, 'second')

    value_name = 'new_value'
    new_value_copied = rust_copy_value_s(value_name, InType, p)
    # optional scalar values are stored as Option<..>, hence the Some(..) wrap
    if not is_required_property(p) and not is_repeated_property(p):
        new_value_copied = 'Some(%s)' % new_value_copied
    part_desc = None
    if part_prop is not None and p.name in ('part', REQUEST_VALUE_PROPERTY_NAME):
        part_desc = make_parts_desc(part_prop)
    # end part description
%>\
% if 'description' in p:
${p.description | rust_doc_sanitize, rust_doc_comment, indent_all_but_first_by(1)}
% endif
% if is_repeated_property(p):
///
/// Append the given value to the *${split_camelcase_s(p.name)}* ${get_word(p, 'location')}property.
/// Each appended value will retain its original ordering and be '/'-separated in the URL's parameters.
% else:
///
/// Sets the *${split_camelcase_s(p.name)}* ${get_word(p, 'location')}property to the given value.
% endif
% if show_part_info(m, p):
///
/// Even though the *parts* list is automatically derived from *Resource* passed in
/// during instantiation and indicates which values you are passing, the response would contain the very same parts.
/// This may not always be desirable, as you can obtain (newly generated) parts you cannot pass in,
/// like statistics that are generated server side. Therefore you should use this method to specify
/// the parts you provide in addition to the ones you want in the response.
% elif is_required_property(p):
///
/// Even though the property as already been set when instantiating this call,
/// we provide this method for API completeness.
% endif
% if part_desc:
///
${part_desc | rust_doc_sanitize, rust_doc_comment, indent_all_but_first_by(1)}
% endif
pub fn ${mangle_ident(setter_fn_name(p))}(mut self, ${value_name}: ${InType}) -> ${ThisType} {
% if p.get('repeated', False):
self.${property(p.name)}.push(${new_value_copied});
% else:
self.${property(p.name)} = ${new_value_copied};
% endif
self
}
</%def>
## creates usage docs the method builder
## show_all: If True, we will show all comments and hide no prelude. It's good to build a complete,
## documented example for a given method.
###############################################################################################
###############################################################################################
## Creates usage docs for the method builder: a runnable-looking example that
## constructs the request value (if any), chains the optional-parameter
## setters with random example values, and performs the final action call.
## show_all:      show all comments and hide no prelude, for a complete example.
## rust_doc:      render as a rust doc-test; otherwise as a markdown rust block.
## handle_result: append a match on the Result demonstrating error handling.
<%def name="usage(resource, method, m, params, request_value, parts=None, show_all=False, rust_doc=True, handle_result=False)">\
<%
    hub_type_name = hub_type(schemas, util.canonical_name())
    required_props, optional_props, part_prop = organize_params(params, request_value)
    # random string values render with a trailing double-quote
    is_string_value = lambda v: v.endswith('"')

    # to rust value
    def trv(spn, sp, sn=None):
        # rust type for a single value: temporarily ignore the 'repeated' flag
        prev = sp.get('repeated', False)
        sp.repeated = False
        res = to_rust_type(schemas, sn, spn, sp, allow_optionals=False)
        sp.repeated = prev
        return res

    # rvfrt = random value for rust type
    rvfrt = lambda spn, sp, sn=None: rnd_arg_val_for_type(trv(spn, sp, sn))

    rb_name = 'req' # name of request binding
    required_args = request_value and [rb_name] or []
    for p in required_props:
        # could also just skip the first element, but ... let's be safe
        if request_value and request_value.id == p.get(TREF):
            continue
        v = rnd_arg_val_for_type(activity_input_type(schemas, p))
        # we chose to replace random strings with their meaning, as indicated by the name !
        if is_string_value(v):
            v = '"%s"' % p.name
        required_args.append(v)
    # end for each required property
    required_args = ', '.join(required_args)

    media_params = method_media_params(m)
    if media_params:
        # index 0 == Simple (usually)
        # index 1 == Resumable
        # propose standard upload for smaller media. Also means we get to test different code-paths
        index = -1
        if media_params[-1].max_size < 100*1024*1024:
            index = 0
        action_name = upload_action_fn(api.terms.upload_action, media_params[index].type.suffix)
    else:
        action_name = api.terms.action
    action_args = media_params and media_params[-1].type.example_value or ''
    random_value_warning = "Values shown here are possibly random and not representative !"

    # choose filters according to presentation mode
    hide_filter = show_all and pass_through or hide_rust_doc_test
    test_block_filter = rust_doc and rust_doc_test_norun or markdown_rust_block
    test_fn_filter = rust_doc and rust_test_fn_invisible or pass_through
    if request_value:
        request_value_type = request_value.id
%>\
<%block filter="test_block_filter">\
${capture(util.test_prelude) | hide_filter}\
% if request_value:
use ${util.library_name()}::api::${request_value_type};
% endif
% if handle_result:
use ${util.library_name()}::{Result, Error};
% endif
% if media_params:
use std::fs;
% endif
<%block filter="test_fn_filter">\
${capture(lib.test_hub, hub_type_name, comments=show_all) | hide_filter}
% if request_value:
// As the method needs a request, you would usually fill it with the desired information
// into the respective structure. Some of the parts shown here might not be applicable !
// ${random_value_warning}
let mut ${rb_name} = ${request_value_type}::default();
% for spn, sp in items(request_value.get('properties', dict())):
% if parts is not None and spn not in parts:
<% continue %>
% endif
<%
    # build an example assignment for this property from a random typed value
    rtn = trv(spn, sp, request_value.id)
    assignment = rnd_arg_val_for_type(rtn)
    if is_string_value(assignment):
        assignment = assignment + '.to_string()'
    if assignment.endswith('default()'):
        assignment = assignment[1:] # cut & - it's not ok in this case :)!
        assignment += '; // is %s' % rtn
    else:
        assignment = 'Some(%s);' % assignment
%>\
${rb_name}.${mangle_ident(spn)} = ${assignment}
% endfor
% endif
// You can configure optional parameters by calling the respective setters at will, and
// execute the final call using `${action_name}(${action_args and '...' or ''})`.
% if optional_props:
// ${random_value_warning}
% endif
let result = hub.${mangle_ident(resource)}().${mangle_ident(method)}(${required_args})\
% for p in optional_props:
% if p.get('skip_example', False):
<% continue %>
% endif
<%block filter="indent_by(13)">\
.${mangle_ident(setter_fn_name(p))}(${rvfrt(p.name, p)})\
</%block>\
% endfor
${'.' + action_name | indent_by(13)}(${action_args}).await;
% if handle_result:
match result {
Err(e) => match e {
// The Error enum provides details about what exactly happened.
// You can also just use its `Debug`, `Display` or `Error` traits
Error::HttpError(_)
|Error::Io(_)
|Error::MissingAPIKey
|Error::MissingToken(_)
|Error::Cancelled
|Error::UploadSizeLimitExceeded(_, _)
|Error::Failure(_)
|Error::BadRequest(_)
|Error::FieldClash(_)
|Error::JsonDecodeError(_, _) => println!("{}", e),
},
Ok(res) => println!("Success: {:?}", res),
}
% endif
</%block>
</%block>\
</%def>
## create an entire 'api.terms.action' method
###############################################################################################
###############################################################################################
<%def name="_action_fn(c, resource, method, m, params, request_value, parts, doit_without_upload = False)">\
<%
import os.path
join_url = lambda b, e: b.strip('/') + e
if doit_without_upload:
media_params = []
else:
media_params = method_media_params(m)
type_params = ''
where = ''
qualifier = 'pub '
add_args = ''
rtype = 'client::Result<hyper::Response<hyper::body::Body>>'
response_schema = method_response(c, m)
supports_download = m.get('supportsMediaDownload', False);
reserved_params = []
if response_schema:
if not supports_download:
reserved_params = ['alt']
rtype = 'client::Result<(hyper::Response<hyper::body::Body>, %s)>' % (response_schema.id)
mtype_param = 'RS'
possible_urls = [m.path]
simple_media_param = None
resumable_media_param = None
if media_params:
type_params = '<%s>' % mtype_param
qualifier = ''
where = '\n\t\twhere ' + mtype_param + ': client::ReadSeek'
add_args = (', mut reader: %s, reader_mime_type: mime::Mime' % mtype_param) + ", protocol: &'static str"
for p in media_params:
if p.protocol == 'simple':
simple_media_param = p
elif p.protocol == 'resumable':
resumable_media_param = p
# end handle media params
if doit_without_upload:
action_fn = qualifier + 'async fn ' + "doit_without_upload" + type_params + '(mut self)' + ' -> ' + rtype + where
else:
action_fn = qualifier + 'async fn ' + api.terms.action + type_params + ('(mut self%s)' % add_args) + ' -> ' + rtype + where
field_params = [p for p in params if p.get('is_query_param', True)]
paddfields = 'self.' + api.properties.params
delegate = 'self.' + property(DELEGATE_PROPERTY_NAME)
delegate_finish = 'dlg.finished'
auth_call = 'self.hub.auth'
default_scope = method_default_scope(m)
# s = '{foo}' -> ('{foo}', 'foo') -> (find_this, replace_with)
seen = set()
replacements = list()
all_required_param_name = set(p.name for p in params if is_required_property(p))
MULTI_SLASH = 'multi-slash-prefix'
URL_ENCODE = 'url-encode'
READER_SEEK = "let size = reader.seek(io::SeekFrom::End(0)).unwrap();\nreader.seek(io::SeekFrom::Start(0)).unwrap();\n"
if media_params:
max_size = media_params[0].max_size
if max_size > 0:
READER_SEEK += "if size > %i {\n\treturn Err(client::Error::UploadSizeLimitExceeded(size, %i))\n}" % (max_size, max_size)
special_cases = set()
for possible_url in possible_urls:
for s in re_find_replacements.findall(possible_url):
if s in seen: continue
seen.add(s)
sn = s[1:-1]
# NOTE: We only handle the cases that are actually used in the schemas. If this shouldn't
# be worth it anymore (i.e. too many cases), then we should use a uri-template library
# to handle this at runtime, possibly, or use a python uri-template library, to more easily
# handle the required cases. Whatever is less work, I guess.
if sn.startswith('/') and sn.endswith('*'):
sn = sn[1:-1]
special_cases.add(MULTI_SLASH)
elif sn.startswith('+'):
sn = sn[1:]
special_cases.add(URL_ENCODE)
assert sn in all_required_param_name, "Expected param '%s' to be in required parameter list for substitution" % sn
replacements.append((s, sn))
# end for each found substitution
# Assure we can substitue everything
for s, d in replacements:
possible_url = possible_url.replace(s, d)
assert '{' not in possible_url, "Failed to replace all fields in '%s', have to parse expressions" % possible_url
# end for each possible url
del seen
%>
% if doit_without_upload:
/// Perform the operation you have build so far, but without uploading. This is used to e.g. renaming or updating the description for a file
% else:
/// Perform the operation you have build so far.
% endif
${action_fn} {
% if URL_ENCODE in special_cases:
use url::percent_encoding::{percent_encode, DEFAULT_ENCODE_SET};
% endif
use std::io::{Read, Seek};
use hyper::header::{CONTENT_TYPE, CONTENT_LENGTH, AUTHORIZATION, USER_AGENT, LOCATION};
use client::ToParts;
let mut dd = client::DefaultDelegate;
let mut dlg: &mut dyn client::Delegate = match ${delegate} {
Some(d) => d,
None => &mut dd
};
dlg.begin(client::MethodInfo { id: "${m.id}",
http_method: ${method_name_to_variant(m.httpMethod)} });
let mut params: Vec<(&str, String)> = Vec::with_capacity(${len(params) + len(reserved_params)} + ${paddfields}.len());
<%
if media_params and 'mediaUpload' in m:
upload_type_map = dict()
for mp in media_params:
if mp.protocol == 'simple':
upload_type_map[mp.protocol] = m.mediaUpload.protocols.simple.multipart and 'multipart' or 'media'
break
# for each meadia param
# end build media param map
%>\
% for p in field_params:
<%
pname = 'self.' + property(p.name) # property identifier
%>\
## parts can also be derived from the request, but we do that only if it's not set
% if p.name == 'part' and request_value:
% if is_repeated_property(p):
if ${pname}.is_empty() {
${pname}.push(self.${property(REQUEST_VALUE_PROPERTY_NAME)}.to_parts());
}
% else:
% if not is_required_property(p):
if ${pname}.is_none() {
${pname} = Some(self.${property(REQUEST_VALUE_PROPERTY_NAME)}.to_parts());
}
% else:
if ${pname}.len() == 0 {
${pname} = self.${property(REQUEST_VALUE_PROPERTY_NAME)}.to_parts();
}
% endif ## not is_required_property(p)
% endif is_repeated_property(p):
% endif ## p.name == 'part' and request_value:
% if p.get('repeated', False):
if ${pname}.len() > 0 {
for f in ${pname}.iter() {
params.push(("${p.name}", f.to_string()));
}
}
% elif not is_required_property(p):
if let Some(value) = ${pname} {
params.push(("${p.name}", value.to_string()));
}
% else:
params.push(("${p.name}", ${pname}.to_string()));
% endif
% endfor
## Additional params - may not overlap with optional params
for &field in [${', '.join(enclose_in('"', reserved_params + [p.name for p in field_params]))}].iter() {
if ${paddfields}.contains_key(field) {
${delegate_finish}(false);
return Err(client::Error::FieldClash(field));
}
}
for (name, value) in ${paddfields}.iter() {
params.push((&name, value.clone()));
}
% if response_schema:
% if supports_download:
let (json_field_missing, enable_resource_parsing) = {
let mut enable = true;
let mut field_present = true;
for &(name, ref value) in params.iter() {
if name == "alt" {
field_present = false;
if <String as AsRef<str>>::as_ref(&value) != "json" {
enable = false;
}
break;
}
}
(field_present, enable)
};
if json_field_missing {
params.push(("alt", "json".to_string()));
}
% else:
params.push(("alt", "json".to_string()));
% endif ## supportsMediaDownload
% endif ## response schema
% if media_params:
let (mut url, upload_type) =
% for mp in media_params:
% if loop.first:
if \
% else:
else if \
% endif
protocol == "${mp.protocol}" {
(self.hub._root_url.clone() + "${mp.path.lstrip('/')}", "${upload_type_map.get(mp.protocol, mp.protocol)}")
} \
% endfor
else {
unreachable!()
};
params.push(("uploadType", upload_type.to_string()));
% else:
let mut url = self.hub._base_url.clone() + "${m.path}";
% endif
% if not default_scope:
% if no_auth is UNDEFINED:
<%
assert 'key' in parameters, "Expected 'key' parameter if there are no scopes"
%>
let key = dlg.api_key();
match key {
Some(value) => params.push(("key", value)),
None => {
${delegate_finish}(false);
return Err(client::Error::MissingAPIKey)
}
}
% endif
% else:
if self.${api.properties.scopes}.len() == 0 {
self.${api.properties.scopes}.insert(${scope_url_to_variant(name, default_scope, fully_qualified=True)}.as_ref().to_string(), ());
}
% endif
## Hanlde URI Tempates
% if replacements:
for &(find_this, param_name) in [${', '.join('("%s", "%s")' % r for r in replacements)}].iter() {
<%
replace_init = ': Option<&str> = None'
replace_assign = 'Some(value)'
url_replace_arg = 'replace_with.expect("to find substitution value in params")'
if URL_ENCODE in special_cases:
replace_init = ' = String::new()'
replace_assign = 'value.to_string()'
url_replace_arg = '&replace_with'
# end handle url encoding
%>\
let mut replace_with${replace_init};
for &(name, ref value) in params.iter() {
if name == param_name {
replace_with = ${replace_assign};
break;
}
}
% if URL_ENCODE in special_cases:
if find_this.as_bytes()[1] == '+' as u8 {
replace_with = percent_encode(replace_with.as_bytes(), DEFAULT_ENCODE_SET).to_string();
}
% endif
url = url.replace(find_this, ${url_replace_arg});
}
## Remove all used parameters
{
let mut indices_for_removal: Vec<usize> = Vec::with_capacity(${len(replacements)});
for param_name in [${', '.join(reversed(['"%s"' % r[1] for r in replacements]))}].iter() {
if let Some(index) = params.iter().position(|t| &t.0 == param_name) {
indices_for_removal.push(index);
}
}
for &index in indices_for_removal.iter() {
params.remove(index);
}
}
% endif
let url = url::Url::parse_with_params(&url, params).unwrap();
% if request_value:
let mut json_mime_type: mime::Mime = "application/json".parse().unwrap();
let mut request_value_reader =
{
let mut value = json::value::to_value(&self.${property(REQUEST_VALUE_PROPERTY_NAME)}).expect("serde to work");
client::remove_json_null_values(&mut value);
let mut dst = io::Cursor::new(Vec::with_capacity(128));
json::to_writer(&mut dst, &value).unwrap();
dst
};
let request_size = request_value_reader.seek(io::SeekFrom::End(0)).unwrap();
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
% endif
% if resumable_media_param:
let mut should_ask_dlg_for_url = false;
let mut upload_url_from_server;
let mut upload_url: Option<String> = None;
% endif
loop {
% if default_scope:
let token = match ${auth_call}.token(&self.${api.properties.scopes}.keys().collect::<Vec<_>>()[..]).await {
Ok(token) => token.clone(),
Err(err) => {
match dlg.token(&err) {
Some(token) => token,
None => {
${delegate_finish}(false);
return Err(client::Error::MissingToken(err))
}
}
}
};
% endif
% if request_value:
request_value_reader.seek(io::SeekFrom::Start(0)).unwrap();
% endif
let mut req_result = {
% if resumable_media_param:
if should_ask_dlg_for_url && (upload_url = dlg.upload_url()) == () && upload_url.is_some() {
should_ask_dlg_for_url = false;
upload_url_from_server = false;
Ok(hyper::Response::builder()
.status(hyper::StatusCode::OK)
.header("Location", upload_url.as_ref().unwrap().clone())
.body(hyper::body::Body::empty())
.unwrap())
} else {
% endif
<%block filter="indent_by(resumable_media_param and 4 or 0)">\
% if request_value and simple_media_param:
let mut mp_reader: client::MultiPartReader = Default::default();
let (mut body_reader, content_type) = match protocol {
"${simple_media_param.protocol}" => {
mp_reader.reserve_exact(2);
${READER_SEEK | indent_all_but_first_by(5)}
mp_reader.add_part(&mut request_value_reader, request_size, json_mime_type.clone())
.add_part(&mut reader, size, reader_mime_type.clone());
let mime_type = mp_reader.mime_type();
(&mut mp_reader as &mut (dyn io::Read + Send), (CONTENT_TYPE, mime_type.to_string()))
},
_ => (&mut request_value_reader as &mut (dyn io::Read + Send), (CONTENT_TYPE, json_mime_type.to_string())),
};
% endif
let client = &self.hub.client;
dlg.pre_request();
let mut req_builder = hyper::Request::builder().method(${method_name_to_variant(m.httpMethod)}).uri(url.clone().into_string())
.header(USER_AGENT, self.hub._user_agent.clone())\
% if default_scope:
.header(AUTHORIZATION, format!("Bearer {}", token.as_str()))\
% endif
;
% if resumable_media_param:
upload_url_from_server = true;
if protocol == "${resumable_media_param.protocol}" {
req_builder = req_builder.header("X-Upload-Content-Type", format!("{}", reader_mime_type));
}
% endif
% if request_value:
% if not simple_media_param:
let request = req_builder
.header(CONTENT_TYPE, format!("{}", json_mime_type.to_string()))
.header(CONTENT_LENGTH, request_size as u64)
.body(hyper::body::Body::from(request_value_reader.get_ref().clone()))\
% else:
let mut body_reader_bytes = vec![];
body_reader.read_to_end(&mut body_reader_bytes).unwrap();
let request = req_builder
.header(content_type.0, content_type.1.to_string())
.body(hyper::body::Body::from(body_reader_bytes))\
% endif ## not simple_media_param
% else:
% if simple_media_param:
let request = if protocol == "${simple_media_param.protocol}" {
${READER_SEEK | indent_all_but_first_by(4)}
let mut bytes = Vec::with_capacity(size as usize);
reader.read_to_end(&mut bytes)?;
req_builder.header(CONTENT_TYPE, reader_mime_type.to_string())
.header(CONTENT_LENGTH, size)
.body(hyper::body::Body::from(bytes))
} else {
req_builder.body(hyper::body::Body::from(Vec::new()))
}\
% else:
let request = req_builder
.body(hyper::body::Body::empty())\
% endif
% endif
;
client.request(request.unwrap()).await
</%block>\
% if resumable_media_param:
}
% endif
};
match req_result {
Err(err) => {
if let client::Retry::After(d) = dlg.http_error(&err) {
sleep(d);
continue;
}
${delegate_finish}(false);
return Err(client::Error::HttpError(err))
}
Ok(mut res) => {
if !res.status().is_success() {
let res_body_string = client::get_body_as_string(res.body_mut()).await;
let (parts, _) = res.into_parts();
let body = hyper::Body::from(res_body_string.clone());
let restored_response = hyper::Response::from_parts(parts, body);
let server_response = json::from_str::<serde_json::Value>(&res_body_string).ok();
if let client::Retry::After(d) = dlg.http_failure(&restored_response, server_response.clone()) {
sleep(d);
continue;
}
${delegate_finish}(false);
return match server_response {
Some(error_value) => Err(client::Error::BadRequest(error_value)),
None => Err(client::Error::Failure(restored_response)),
}
}
% if resumable_media_param:
if protocol == "${resumable_media_param.protocol}" {
${READER_SEEK | indent_all_but_first_by(6)}
let upload_result = {
let url_str = &res.headers().get("Location").expect("LOCATION header is part of protocol").to_str().unwrap();
if upload_url_from_server {
dlg.store_upload_url(Some(url_str));
}
client::ResumableUploadHelper {
client: &self.hub.client,
delegate: dlg,
start_at: if upload_url_from_server { Some(0) } else { None },
auth: &${auth_call},
user_agent: &self.hub._user_agent,
auth_header: format!("Bearer {}", token.as_str()),
url: url_str,
reader: &mut reader,
media_type: reader_mime_type.clone(),
content_length: size
}.upload().await
};
match upload_result {
None => {
${delegate_finish}(false);
return Err(client::Error::Cancelled)
}
Some(Err(err)) => {
## Do not ask the delgate again, as it was asked by the helper !
${delegate_finish}(false);
return Err(client::Error::HttpError(err))
}
## Now the result contains the actual resource, if any ... it will be
## decoded next
Some(Ok(upload_result)) => {
res = upload_result;
if !res.status().is_success() {
## delegate was called in upload() already - don't tell him again
dlg.store_upload_url(None);
${delegate_finish}(false);
return Err(client::Error::Failure(res))
}
}
}
}
% endif
% if response_schema:
## If 'alt' is not json, we cannot attempt to decode the response
let result_value = \
% if supports_download:
if enable_resource_parsing \
% endif
{
let res_body_string = client::get_body_as_string(res.body_mut()).await;
match json::from_str(&res_body_string) {
Ok(decoded) => (res, decoded),
Err(err) => {
dlg.response_json_decode_error(&res_body_string, &err);
return Err(client::Error::JsonDecodeError(res_body_string, err));
}
}
}\
% if supports_download:
else { (res, Default::default()) }\
% endif
;
% else:
let result_value = res;
% endif
${delegate_finish}(true);
return Ok(result_value)
}
}
}
}
% for p in media_params:
${p.description | rust_doc_sanitize, rust_doc_comment, indent_all_but_first_by(1)}
///
% for item_name, item in p.info.items():
/// * *${split_camelcase_s(item_name)}*: ${isinstance(item, (list, tuple)) and put_and(enclose_in("'", item)) or str(item)}
% endfor
pub async fn ${upload_action_fn(api.terms.upload_action, p.type.suffix)}<${mtype_param}>(self, ${p.type.arg_name}: ${mtype_param}, mime_type: mime::Mime) -> ${rtype}
where ${mtype_param}: client::ReadSeek {
self.${api.terms.action}(${p.type.arg_name}, mime_type, "${p.protocol}").await
}
% endfor
</%def>

View File

@@ -0,0 +1,132 @@
<%!
from generator.lib.util import (put_and, rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment,
rb_type, singular, hub_type, mangle_ident, mb_type, property,
to_fqan, indent_all_but_first_by, is_repeated_property, is_required_property,
activity_input_type, TREF, IO_REQUEST, schema_to_required_property,
rust_copy_value_s, organize_params, REQUEST_VALUE_PROPERTY_NAME,
build_all_params, rb_type_params_s, hub_type_params_s, mb_type_params_s, mb_additional_type_params,
struct_type_bounds_s, METHODS_RESOURCE, SPACES_PER_TAB, prefix_all_but_first_with,
METHODS_BUILDER_MARKER_TRAIT, remove_empty_lines, method_default_scope, rust_doc_sanitize)
%>\
<%namespace name="util" file="../../../lib/util.mako"/>\
<%namespace name="lib" file="lib.mako"/>\
## Creates a Resource builder type
###############################################################################################
###############################################################################################
## Emit the resource-builder type for `resource` (e.g. `UserMethods`): a struct
## that merely borrows the hub, plus one constructor method per activity which
## returns the matching call builder with all required fields pre-populated.
## `c` is the api context (rta_map: resource -> activities, fqan_map, ...).
<%def name="new(resource, c)">\
<%
    hub_type_name = hub_type(schemas, util.canonical_name())
    rb_params = rb_type_params_s(resource, c)
    ThisType = rb_type(resource) + rb_params
%>\
% if resource == METHODS_RESOURCE:
/// A builder providing access to all free methods, which are not associated with a particular resource.
% else:
/// A builder providing access to all methods supported on *${singular(resource)}* resources.
% endif
/// It is not used directly, but through the `${hub_type_name}` hub.
///
/// # Example
///
/// Instantiate a resource builder
///
<%block filter="rust_doc_test_norun, rust_doc_comment">\
${util.test_prelude()}\
<%block filter="rust_test_fn_invisible">\
${lib.test_hub(hub_type_name, comments=False)}\
// Usually you wouldn't bind this to a variable, but keep calling *CallBuilders*
// like ${put_and(sorted('`%s(...)`' % mangle_ident(f) for f in c.rta_map[resource]))}
// to build up your call.
let rb = hub.${mangle_ident(resource)}();
</%block>
</%block>
pub struct ${ThisType}
where ${struct_type_bounds_s()} {
hub: &'a ${hub_type_name}${hub_type_params_s()},
}
impl${rb_params} ${METHODS_BUILDER_MARKER_TRAIT} for ${ThisType} {}
## Builder Creators Methods ####################
## One `pub fn <activity>()` per activity of this resource.
impl${rb_params} ${ThisType} {
% for a in c.rta_map[resource]:
<%
    m = c.fqan_map[to_fqan(c.rtc_map[resource], resource, a)]
    RType = mb_type(resource, a)
    # skip part if we have a request resource. Only resources can have parts
    # that we can easily deduce
    params, request_value = build_all_params(c, m)
    required_props, optional_props, part_prop = organize_params(params, request_value)
    method_args = ''
    if required_props:
        method_args = ', ' + ', '.join('%s: %s' % (mangle_ident(p.name), activity_input_type(schemas, p)) for p in required_props)
    mb_tparams = mb_type_params_s(m)
    # we could have information about data requirements for each property in its dict.
    # for now, we just hardcode it, and treat the entries as a way to easily change param names
    assert len(api.properties) == 2, "Hardcoded for now, thanks to scope requirements"
    type_params = ''
    if mb_additional_type_params(m):
        type_params = '<%s>' % ', '.join(mb_additional_type_params(m))
%>\
% if 'description' in m:
/// Create a builder to help you perform the following task:
///
${m.description | rust_doc_sanitize, rust_doc_comment, indent_all_but_first_by(1)}
% endif
% if required_props:
///
/// # Arguments
///
% for p in required_props:
<%
    arg_prefix = "/// * `" + p.name + "` - "
%>\
${arg_prefix}${p.get('description', "No description provided.")
| remove_empty_lines, prefix_all_but_first_with(' ' * SPACES_PER_TAB + '///' + ' ' * (len(arg_prefix) - len('///')))}
% endfor
% endif
pub fn ${mangle_ident(a)}${type_params}(&self${method_args}) -> ${RType}${mb_tparams} {
## When a request value carries a `parts` property, derive it from the request
## resource itself instead of asking the caller for it.
% if part_prop and request_value:
use client::ToParts;
% if is_repeated_property(part_prop):
let parts = vec![${mangle_ident(REQUEST_VALUE_PROPERTY_NAME)}.to_parts()];
% else:
% if not is_required_property(part_prop):
let parts = Some(${mangle_ident(REQUEST_VALUE_PROPERTY_NAME)}.to_parts());
% else:
let parts = ${mangle_ident(REQUEST_VALUE_PROPERTY_NAME)}.to_parts();
% endif ## not is_required_property(part_prop)
% endif is_repeated_property(part_prop):
% endif
${RType} {
hub: self.hub,
% for p in required_props:
${property(p.name)}: ${rust_copy_value_s(mangle_ident(p.name), activity_input_type(schemas, p), p)},
% endfor
## auto-generate parts from request resources
% if part_prop and request_value:
${property(part_prop.name)}: parts,
% endif
% for p in optional_props:
${property(p.name)}: Default::default(),
% endfor
## hub-wide properties (delegate, scopes, ...); scopes are omitted for
## methods that have no default scope.
% for prop_key, custom_name in api.properties.items():
% if prop_key == 'scopes' and not method_default_scope(m):
<% continue %>\
% endif
${custom_name}: Default::default(),
% endfor
}
}
% endfor ## for each activity
}
</%def>

View File

@@ -0,0 +1,164 @@
<%!
from generator.lib.util import (schema_markers, rust_doc_comment, mangle_ident, to_rust_type, put_and,
IO_TYPES, activity_split, enclose_in, REQUEST_MARKER_TRAIT, mb_type, indent_all_but_first_by,
NESTED_TYPE_SUFFIX, RESPONSE_MARKER_TRAIT, split_camelcase_s, METHODS_RESOURCE,
PART_MARKER_TRAIT, canonical_type_name, TO_PARTS_MARKER, UNUSED_TYPE_MARKER, is_schema_with_optionals,
rust_doc_sanitize, items)
%>\
## Build a schema which must be an object
###################################################################################################################
###################################################################################################################
## Emit the Rust type for an object-like schema `s`:
##  * with `properties`        -> a struct with one (optionally serde-renamed) field each
##  * with `additionalProperties` -> a newtype wrapping the map/collection type
##  * with `variant`           -> an enum (first variant is the Default)
##  * otherwise                -> an "empty" struct with one never-set optional,
##                                so `{}` still deserializes.
## `allow_optionals` controls whether fields become Option<_>.
<%def name="_new_object(s, properties, c, allow_optionals)">\
<% struct = 'pub struct ' + s.id %>\
% if properties:
${struct} {
% for pn, p in items(properties):
${p.get('description', 'no description provided') | rust_doc_sanitize, rust_doc_comment, indent_all_but_first_by(1)}
## keep the wire name when rust-mangling changed the identifier
% if pn != mangle_ident(pn):
#[serde(rename="${pn}")]
% endif
pub ${mangle_ident(pn)}: ${to_rust_type(schemas, s.id, pn, p, allow_optionals=allow_optionals)},
% endfor
}
% elif 'additionalProperties' in s:
${struct}(pub ${to_rust_type(schemas, s.id, NESTED_TYPE_SUFFIX, s, allow_optionals=allow_optionals)});
% elif 'variant' in s:
<%
    et = s.id
    variant_type = lambda p: canonical_type_name(p.type_value)
%>\
pub enum ${et} {
% for p in s.variant.map:
${p.get('description', 'no description provided') | rust_doc_sanitize, rust_doc_comment, indent_all_but_first_by(1)}
% if variant_type(p) != p.type_value:
#[serde(rename="${p.type_value}")]
% endif
${variant_type(p)}(pub ${to_rust_type(schemas, s.id, None, p, allow_optionals=allow_optionals)}),
% endfor
}
impl Default for ${et} {
fn default() -> ${et} {
${et}::${variant_type(s.variant.map[0])}(Default::default())
}
}
% else: ## it's an empty struct, i.e. struct Foo;
## However, to enable the empty JSON object to be parsed, we set one unused optional parameter.
${struct} { _never_set: Option<bool> }
% endif ## 'properties' in s
</%def>
## Create new schema with everything.
## 's' contains the schema structure from json to build
###################################################################################################################
###################################################################################################################
## Emit the full Rust declaration for schema `s`: rustdoc, derive list, the
## type itself (struct/enum/newtype), marker-trait impls, and - for types with
## optional fields that act as request parts - a ToParts implementation.
<%def name="new(s, c)">\
<%
    markers = schema_markers(s, c, transitive=True)
    # We always need Serialization support, as others might want to serialize the response, even though we will
    # only deserialize it.
    # And since we don't know what others want to do, we implement Deserialize as well by default ...
    traits = ['Clone', 'Debug', 'Serialize', 'Deserialize']
    # default only works for structs, and 'variant' will be an enum
    if 'variant' not in s:
        traits.insert(0, 'Default')
    nt_markers = schema_markers(s, c, transitive=False)
    allow_optionals = is_schema_with_optionals(nt_markers)
    # waiting for Default: https://github.com/rust-lang/rustc-serialize/issues/71
    if s.type == 'any':
        traits.remove('Default')
    s_type = s.id
%>\
<%block filter="rust_doc_sanitize, rust_doc_comment">\
${doc(s, c)}\
</%block>
#[derive(${', '.join(traits)})]
% if s.type == 'object':
${_new_object(s, s.get('properties'), c, allow_optionals)}\
% elif s.type == 'array':
## arrays of scalars become a newtype; arrays of objects reuse the object path
% if s.items.get('type') != 'object':
pub struct ${s_type}(${to_rust_type(schemas, s.id, NESTED_TYPE_SUFFIX, s, allow_optionals=allow_optionals)});
% else:
${_new_object(s, s.items.get('properties'), c, allow_optionals)}\
% endif ## array item != 'object'
% elif s.type == 'any':
## waiting for Default: https://github.com/rust-lang/rustc-serialize/issues/71
pub struct ${s_type}(pub json::Value);
impl Default for ${s_type} {
fn default() -> ${s_type} {
${s_type}(json::Value::Null)
}
}
% else:
<% assert False, "Object not handled: %s" % str(s) %>\
% endif ## type == ?
## non-transitive markers become marker-trait impls (Part/Request/Response ...)
% for marker_trait in nt_markers:
% if marker_trait not in (TO_PARTS_MARKER, UNUSED_TYPE_MARKER):
impl ${marker_trait} for ${s_type} {}
% endif
% endfor
% if TO_PARTS_MARKER in nt_markers and allow_optionals:
impl ${TO_PARTS_MARKER} for ${s_type} {
/// Return a comma separated list of members that are currently set, i.e. for which `self.member.is_some()`.
/// The produced string is suitable for use as a parts list that indicates the parts you are sending, and/or
/// the parts you want to see in the server response.
fn to_parts(&self) -> String {
let mut r = String::new();
% for pn, p in items(s.properties):
<%
    mn = 'self.' + mangle_ident(pn)
    rt = to_rust_type(schemas, s.id, pn, p, allow_optionals=allow_optionals)
    # collections are checked for emptiness instead of Option-ness
    check = 'is_some()'
    if rt.startswith('Vec') or rt.startswith('HashMap'):
        check = 'len() > 0'
%>\
if ${mn}.${check} { r = r + "${pn},"; }
% endfor
## remove (possibly non-existing) trailing comma
r.pop();
r
}
}
% endif
</%def>
#########################################################################################################
#########################################################################################################
## Emit the rustdoc body for schema `s`: its description, the list of
## activities it participates in (sta_map: schema -> {activity: io-types}),
## and the wrapped type for non-object schemas.
<%def name="doc(s, c)">\
${s.get('description', 'There is no detailed description.')}
% if s.id in c.sta_map:
# Activities
This type is used in activities, which are methods you may call on this type or where this type is involved in.
The list links the activity name, along with information about where it is used (one of ${put_and(enclose_in('*', IO_TYPES))}).
% for a, iot in c.sta_map[s.id].items():
<%
    category, name, method = activity_split(a)
    name_suffix = ' ' + split_camelcase_s(name)
    # free methods carry no resource-name suffix
    if name == METHODS_RESOURCE:
        name_suffix = ''
    struct_url = mb_type(name, method)
    method_name = ' '.join(split_camelcase_s(method).split('.')) + name_suffix
    value_type = '|'.join(iot) or 'none'
%>\
* [${method_name}](${struct_url}) (${value_type})
% endfor
% else:
This type is not used in any activity, and only used as *part* of another schema.
% endif
% if s.type != 'object':
## for some reason, it's not shown in rustdoc ...
The contained type is `${to_rust_type(schemas, s.id, s.id, s)}`.
%endif
</%def>

View File

@@ -0,0 +1,99 @@
<%
from generator.lib.util import (markdown_comment, new_context)
from generator.lib.cli import (CONFIG_DIR, CONFIG_DIR_FLAG, SCOPE_FLAG, application_secret_path, DEBUG_FLAG)
c = new_context(schemas, resources, context.get('methods'))
%>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%namespace name="argparse" file="lib/argparse.mako"/>\
<%block filter="markdown_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>
The `${util.program_name()}` command-line interface *(CLI)* allows you to use most features of the *Google ${util.canonical_name()}* service from the comfort of your terminal.
By default all output is printed to standard out, but flags can be set to direct it into a file independent of your shell's
capabilities. Errors will be printed to standard error, and cause the program's exit code to be non-zero.
If data-structures are requested, these will be returned as pretty-printed JSON, to be useful as input to other tools.
% if documentationLink:
Everything else about the *${util.canonical_name()}* API can be found at the
[official documentation site](${documentationLink}).
% endif
# Installation and Source Code
Install the command-line interface with cargo using:
```bash
cargo install ${util.crate_name()}
```
Find the source code [on github](${util.github_source_root_url()}).
# Usage
This documentation was generated from the *${util.canonical_name()}* API at revision *${revision is UNDEFINED and '00000000' or revision}*. The CLI is at version *${cargo.build_version}*.
```bash
${argparse.grammar(c)}
```
# Configuration
The program will store all persistent data in the `${CONFIG_DIR}` directory in *JSON* files prefixed with `${util.program_name()}-`. You can change the directory used to store configuration with the `--${CONFIG_DIR_FLAG}` flag on a per-invocation basis.
More information about the various kinds of persistent data are given in the following paragraphs.
# Authentication
Most APIs require a user to authenticate any request. If this is the case, the [scope][scopes] determines the
set of permissions granted. The granularity of these is usually no more than *read-only* or *full-access*.
If not set, the system will automatically select the smallest feasible scope, e.g. when invoking a
method that is read-only, it will ask only for a read-only scope.
You may use the `--${SCOPE_FLAG}` flag to specify a scope directly.
All applicable scopes are documented in the respective method's CLI documentation.
The first time a scope is used, the user is asked for permission. Follow the instructions given
by the CLI to grant permissions, or to decline.
If a scope was authenticated by the user, the respective information will be stored as *JSON* in the configuration
directory, e.g. `${CONFIG_DIR}/${util.program_name()}-token-<scope-hash>.json`. No manual management of these tokens
is necessary.
To revoke granted authentication, please refer to the [official documentation][revoke-access].
# Application Secrets
In order to allow any application to use Google services, it will need to be registered using the
[Google Developer Console][google-dev-console]. APIs the application may use are then enabled for it
one by one. Most APIs can be used for free and have a daily quota.
To allow more comfortable usage of the CLI without forcing anyone to register their own application, the CLI
comes with a default application secret that is configured accordingly. This also means that heavy usage
all around the world may deplete the daily quota.
You can workaround this limitation by putting your own secrets file at this location:
`${CONFIG_DIR}/${application_secret_path(util.program_name())}`, assuming that the required *${name}* API
was enabled for it. Such a secret file can be downloaded in the *Google Developer Console* at
*APIs & auth -> Credentials -> Download JSON* and used as is.
Learn more about how to setup Google projects and enable APIs using the [official documentation][google-project-new].
# Debugging
Even though the CLI does its best to provide usable error messages, sometimes it might be desirable to know
what exactly led to a particular issue. This is done by allowing all client-server communication to be
output to standard error *as-is*.
The `--${DEBUG_FLAG}` flag will print errors using the `Debug` representation to standard error.
You may consider redirecting standard error into a file for ease of use, e.g. `${util.program_name()} --${DEBUG_FLAG} <resource> <method> [options] 2>debug.txt`.
[scopes]: https://developers.google.com/+/api/oauth#scopes
[revoke-access]: http://webapps.stackexchange.com/a/30849
[google-dev-console]: https://console.developers.google.com/
[google-project-new]: https://developers.google.com/console/help/new/

View File

@@ -0,0 +1,223 @@
<%namespace name="util" file="../../../lib/util.mako"/>\
<%!
from mako.filters import xml_escape
from generator.lib.util import (hash_comment, new_context, method_default_scope, indent_all_but_first_by, is_repeated_property, custom_sorted)
from generator.lib.cli import (subcommand_md_filename, new_method_context, SPLIT_START, SPLIT_END, pretty, SCOPE_FLAG,
mangle_subcommand, is_request_value_property, FIELD_SEP, PARAM_FLAG, UPLOAD_FLAG, docopt_mode,
FILE_ARG, MIME_ARG, OUT_ARG, OUTPUT_FLAG, to_cli_schema, cli_schema_to_yaml, SchemaEntry,
STRUCT_FLAG, field_to_value, CTYPE_ARRAY, CTYPE_MAP, to_docopt_arg, FILE_FLAG, MIME_FLAG,
DEFAULT_MIME)
from copy import deepcopy
escape_html = lambda n: n.replace('>', r'\>')
NO_DESC = 'No description provided.'
%>\
## Render one markdown documentation page per CLI subcommand (resource/method
## pair). Pages are emitted as one stream, delimited by SPLIT_START/SPLIT_END
## markers plus the target filename, so a post-processing step can split the
## stream into individual .md files.
## Fixes: missing verb in the generated sentence "The method's return value
## [is] a JSON encoded structure" (matches the sibling "is a byte stream"
## sentence), and the fragile `cond and a or b` pluralization idiom.
<%
    c = new_context(schemas, resources, context.get('methods'))
%>\
% for resource in sorted(c.rta_map.keys()):
% for method in sorted(c.rta_map[resource]):
<%
    mc = new_method_context(resource, method, c)
%>\
${SPLIT_START} ${subcommand_md_filename(resource, method)}
% if 'description' in mc.m:
${mc.m.description | xml_escape}
% endif # show method description
% if mc.m.get('scopes'):
# Scopes
You will need authorization for \
% if len(mc.m.scopes) > 1:
at least one of the following scopes to make a valid call:
% for s in mc.m.scopes:
* *${s}*
% endfor
% else:
the *${mc.m.scopes[0]}* scope to make a valid call.
% endif # len(scopes) > 1
If unset, the scope for this method defaults to *${method_default_scope(mc.m)}*.
You can set the scope for this method like this: `${util.program_name()} --${SCOPE_FLAG} <scope> ${mangle_subcommand(resource)} ${mangle_subcommand(method)} ...`
% endif # have method scopes
<%
    # the request value is set via -r/field-cursor args, so it is no scalar argument
    rprops = [p for p in mc.required_props if not is_request_value_property(mc, p)]
    oprops = [p for p in mc.optional_props if not p.get('skip_example', False)]
    smd = mc.m.get('supportsMediaDownload', False)
%>\
% if rprops:
# Required Scalar ${'Arguments' if len(rprops) > 1 else 'Argument'}
% for p in rprops:
* **${to_docopt_arg(p) | xml_escape}** *(${p.type})*
- ${p.get('description') or NO_DESC | xml_escape, indent_all_but_first_by(2)}
% if p.get('repeated'):
- This property can be specified one or more times
% endif
% endfor # each required property (which is not the request value)
% endif # have required properties
% if mc.request_value:
<%
    request_cli_schema = to_cli_schema(c, mc.request_value)
%>\
# Required Request Value
The request value is a data-structure with various fields. Each field may be a simple scalar or another data-structure.
In the latter case it is advised to set the field-cursor to the data-structure's field to specify values more concisely.
For example, a structure like this:
```
${cli_schema_to_yaml(request_cli_schema)}
```
can be set completely with the following arguments which are assumed to be executed in the given order. Note how the cursor position is adjusted to the respective structures, allowing simple field names to be used most of the time.
${self._list_schem_args(request_cli_schema)}
${'###'} About Cursors
The cursor position is key to comfortably set complex nested structures. The following rules apply:
* The cursor position is always set relative to the current one, unless the field name starts with the `${FIELD_SEP}` character. Fields can be nested such as in `-${STRUCT_FLAG} f${FIELD_SEP}s${FIELD_SEP}o` .
* The cursor position is set relative to the top-level structure if it starts with `${FIELD_SEP}`, e.g. `-${STRUCT_FLAG} ${FIELD_SEP}s${FIELD_SEP}s`
* You can also set nested fields without setting the cursor explicitly. For example, to set a value relative to the current cursor position, you would specify `-${STRUCT_FLAG} struct${FIELD_SEP}sub_struct=bar`.
* You can move the cursor one level up by using `${FIELD_SEP}${FIELD_SEP}`. Each additional `${FIELD_SEP}` moves it up one additional level. E.g. `${FIELD_SEP}${FIELD_SEP}${FIELD_SEP}` would go three levels up.
% endif # have request value
% if mc.media_params:
<%
    protocols = [mp.protocol for mp in mc.media_params]
%>\
# Required Upload Flags
This method supports the upload of data, which *requires* all of the following flags to be set:
* **-${UPLOAD_FLAG} ${docopt_mode(protocols)}**
% for mp in mc.media_params:
- **${mp.protocol}** - ${mp.get('description', NO_DESC).split('\n')[0] | xml_escape}
% endfor # each media param
* **-${FILE_FLAG} ${escape_html(FILE_ARG)}**
- Path to file to upload. It must be seekable.
The following flag *may* be set:
* **-${MIME_FLAG} ${escape_html(MIME_ARG)}**
- the mime type, like '${DEFAULT_MIME}', which is the default
% endif # have upload capabilities
% if mc.response_schema or smd:
# Optional Output Flags
% if mc.response_schema:
The method's return value is a JSON encoded structure, which will be written to standard output by default.
% endif
% if smd:
% if mc.response_schema:
As this method supports **media download**, you may specify the `-${PARAM_FLAG} alt=media` flag to set the output to be an octet stream of the underlying media. In that case, you will not receive JSON output anymore.
% else:
The method's return value is a byte stream of the downloadable resource.
% endif # handle response schema
% endif # support media download
* **-${OUTPUT_FLAG} ${escape_html(OUT_ARG)}**
- *${escape_html(OUT_ARG)}* specifies the *destination* to which to write the server's result to.
% if smd and mc.response_schema:
It will either be a JSON-encoded structure, or the media file you are downloading.
% elif smd:
It will be a byte stream of the downloadable resource.
% else:
It will be a JSON-encoded structure.
% endif
The *destination* may be `-` to indicate standard output, or a filepath that is to contain the received bytes.
If unset, it defaults to standard output.
% endif # have output
% if oprops:
# Optional Method Properties
You may set the following properties to further configure the call. Please note that `-${PARAM_FLAG}` is followed by one
or more key-value-pairs, and is called like this `-${PARAM_FLAG} k1=v1 k2=v2` even though the listing below repeats the
`-${PARAM_FLAG}` for completeness.
% for p in custom_sorted(oprops):
${self._md_property(p)}
% endfor
% endif # optional method properties
% if parameters is not UNDEFINED:
# Optional General Properties
The following properties can configure any call, and are not specific to this method.
% for pn in sorted(parameters.keys()):
<%
    p = deepcopy(parameters[pn])
    p.name = pn
%>\
${self._md_property(p)}
% endfor
% endif # general parameters
${SPLIT_END}
% endfor # each method
% endfor # each resource
## Render one optional key=value parameter `p` as a markdown bullet:
## the `-p name=type` form followed by its (HTML-escaped) description.
<%def name="_md_property(p)">\
* **-${PARAM_FLAG} ${mangle_subcommand(p.name)}=${p.type}**
- ${p.get('description') or NO_DESC | xml_escape ,indent_all_but_first_by(2)}
</%def>
## Recursively render example `-r` arguments for a CLI request schema as a
## nested markdown list. `cursor_tokens` threads the pending field-cursor
## between recursion levels; it is mutated in place by the callee.
## NOTE(review): the mutable default `cursor_tokens=list()` is safe only
## because an empty list is immediately rebound below and thus the shared
## default object is never mutated - keep that guard intact.
<%def name="_list_schem_args(schema, cursor_tokens=list(), first_flag=None)">\
<%
    if len(cursor_tokens) == 0:
        cursor_tokens = [FIELD_SEP]
    if first_flag is None:
        first_flag = '-%s ' % STRUCT_FLAG
    # Format the accumulated cursor tokens: leading FIELD_SEPs are kept
    # verbatim (absolute/up-level moves), the remainder is joined by FIELD_SEP.
    def cursor_fmt(cursor):
        fndfi = 0 # first non-dot field index
        for (fndfi, v) in enumerate(cursor):
            if v != FIELD_SEP:
                break
        res = ''.join(cursor[:fndfi]) + FIELD_SEP.join(cursor[fndfi:])
        res += ' '
        return res
    # Prepend (and consume) any pending cursor prefix before `field`.
    def cursor_arg(field):
        prefix = ''
        if cursor_tokens:
            prefix = cursor_fmt(cursor_tokens)
            del cursor_tokens[:]
        return prefix + field
%>\
% for fni, fn in enumerate(sorted(schema.fields.keys())):
<%
    f = schema.fields[fn]
    # only the very first argument of the whole listing carries the `-r ` flag
    if fni > 0:
        first_flag = ''
%>\
% if isinstance(f, SchemaEntry):
* `${first_flag}${cursor_arg(mangle_subcommand(fn))}=${field_to_value(f)}`
- ${f.property.get('description', NO_DESC) | xml_escape, indent_all_but_first_by(2)}
% if f.container_type == CTYPE_ARRAY:
- Each invocation of this argument appends the given value to the array.
% elif f.container_type == CTYPE_MAP:
- the value will be associated with the given `key`
% endif # handle container type
% else:
## nested structure: descend with the field pushed onto the cursor ...
<%
    cursor_tokens.append(mangle_subcommand(fn))
%>\
${self._list_schem_args(f, cursor_tokens, first_flag)}
## ... and move the cursor one level back up afterwards
<%
    assert not cursor_tokens or cursor_tokens[-1] == FIELD_SEP
    if not cursor_tokens:
        cursor_tokens.append(FIELD_SEP)
    cursor_tokens.append(FIELD_SEP)
%>\
% endif
% endfor
</%def>

View File

@@ -0,0 +1,297 @@
<%namespace name="util" file="../../../lib/util.mako"/>\
<%!
import os
from generator.lib.util import (put_and, supports_scopes, api_index, indent_by, enclose_in, put_and, escape_rust_string)
from generator.lib.cli import (mangle_subcommand, new_method_context, PARAM_FLAG, STRUCT_FLAG, UPLOAD_FLAG, OUTPUT_FLAG, VALUE_ARG,
CONFIG_DIR, SCOPE_FLAG, is_request_value_property, FIELD_SEP, docopt_mode, FILE_ARG, MIME_ARG, OUT_ARG,
CONFIG_DIR_FLAG, KEY_VALUE_ARG, to_docopt_arg, DEBUG_FLAG, MODE_ARG, SCOPE_ARG,
CONFIG_DIR_ARG, FILE_FLAG, MIME_FLAG, subcommand_md_filename)
def rust_boolean(v):
    """Render a truthy/falsy Python value as a Rust ``bool`` literal.

    Replaces the fragile ``v and 'true' or 'false'`` idiom with a
    conditional expression (the and/or form silently misbehaves whenever
    the "true" operand is falsy).
    """
    return 'true' if v else 'false'
def rust_optional(v):
    """Render a Python value as a Rust ``Option<_>`` literal.

    ``None`` maps to ``None``; bools become ``true``/``false``, strings
    become raw-string literals, and lists are rendered as a ``vec!`` of
    ``UploadProtocol`` variants. Any other value is interpolated verbatim.
    """
    if v is None:
        return 'None'
    if isinstance(v, bool):
        inner = 'true' if v else 'false'
    elif isinstance(v, str):
        inner = 'r##"%s"##' % v
    elif isinstance(v, list):
        variants = ('UploadProtocol::%s' % p.capitalize() for p in v)
        inner = 'vec![%s]' % ','.join(variants)
    else:
        inner = v
    return 'Some(%s)' % inner
%>\
## Emit the docopt-style usage grammar for the generated CLI: one usage line
## per resource/method pair (required args, -r key=value for request values,
## upload flags, -p parameters, -o output), plus the global configuration
## flags (--scope, --config-dir).
<%def name="grammar(c)">\
${util.program_name()} [options]
% for resource in sorted(c.rta_map.keys()):
${mangle_subcommand(resource)}
% for method in sorted(c.rta_map[resource]):
<%
    mc = new_method_context(resource, method, c)
    args = list()
    # required scalar properties become positional arguments
    for p in mc.required_props:
        if is_request_value_property(mc, p):
            continue
        args.append(to_docopt_arg(p))
    # end for each required property
    if mc.request_value:
        args.append('(-%s %s)...' % (STRUCT_FLAG, '<%s>' % KEY_VALUE_ARG))
    # end request_value
    if mc.media_params:
        upload_protocols = [mp.protocol for mp in mc.media_params]
        mode = docopt_mode(upload_protocols)
        args.append('(-%s %s -%s <%s> [-%s <%s>])' % (UPLOAD_FLAG, mode, FILE_FLAG, FILE_ARG, MIME_FLAG, MIME_ARG))
    # end upload handling
    if mc.optional_props or parameters is not UNDEFINED:
        args.append('[-%s %s]...' % (PARAM_FLAG, '<%s>' % VALUE_ARG))
    # end parameters
    if mc.response_schema or mc.m.get('supportsMediaDownload', False):
        args.append('[-%s <%s>]' % (OUTPUT_FLAG, OUT_ARG))
    # handle output
%>\
${mangle_subcommand(method)} ${' '.join(args)}
% endfor # each method
% endfor # end for each resource
${util.program_name()} --help
Configuration:
% if supports_scopes(auth):
[--${SCOPE_FLAG} <${SCOPE_ARG}>]...
Specify the authentication a method should be executed in. Each scope
requires the user to grant this application permission to use it.
If unset, it defaults to the shortest scope url for a particular method.
% endif scopes
--${CONFIG_DIR_FLAG} <${CONFIG_DIR_ARG}>
A directory into which we will store our persistent data. Defaults to
a user-writable directory that we will create during the first invocation.
[default: ${CONFIG_DIR}]
</%def>
## Emit the Rust code that declares the clap `App`: a static `arg_data` table
## (subcommand/argument tuples) and the loop that turns it into SubCommand/Arg
## builders, including the special 2-value upload argument with its -m mime flag.
## Fixes three defects in user-visible help strings:
##  * implicit string concatenation dropped a space after "to use it." and
##    after "first invocation." (help text ran the sentences together)
##  * the "[default: %s" fragment was missing its closing "]" (compare the
##    docopt grammar's "[default: ${CONFIG_DIR}]")
##  * typo "mime time" -> "mime type" in the -m flag's help
<%def name="new(c)" buffered="True">\
<%
    doc_base_url = cargo.doc_base_url + '/' + os.path.dirname(api_index(cargo.doc_base_url, name,
                                                              version, make, cargo, revision, check_exists=False))
    url_info = "All documentation details can be found at " + doc_base_url
    # list of tuples
    # (0) = long name
    # (1) = description
    # (2) = argument name, no argument if no argument
    # (3) = multiple
    global_args = list()
    if supports_scopes(auth):
        global_args.append((
            SCOPE_FLAG,
            "Specify the authentication a method should be executed in. Each scope "
            "requires the user to grant this application permission to use it. "
            "If unset, it defaults to the shortest scope url for a particular method.",
            SCOPE_ARG,
            True
        ))
    # end add scope arg
    global_args.append((
        CONFIG_DIR_FLAG,
        "A directory into which we will store our persistent data. Defaults to "
        "a user-writable directory that we will create during the first invocation. "
        "[default: %s]" % CONFIG_DIR,
        CONFIG_DIR_ARG,
        False,
    ))
    global_args.append((
        DEBUG_FLAG,
        "Debug print all errors",
        None,
        False,
    ))
%>\
<%
    # the special two-value upload argument is only needed if any method uploads
    have_media_params = False
    for resource in sorted(c.rta_map.keys()):
        methods = sorted(c.rta_map[resource])
        for method in methods:
            mc = new_method_context(resource, method, c)
            if mc.media_params:
                have_media_params = True
                break
        # end for each method
    # end for each resource
%>\
% if have_media_params:
let upload_value_names = ["${MODE_ARG}", "${FILE_ARG}"];
% endif
let arg_data = [
% for resource in sorted(c.rta_map.keys()):
<%
    methods = sorted(c.rta_map[resource])
%>\
<%block filter="indent_by(4)">\
("${mangle_subcommand(resource)}", "methods: ${put_and(["'%s'" % mangle_subcommand(m) for m in methods])}", vec![
% for method in methods:
<%
    mc = new_method_context(resource, method, c)
    # A list of tuples
    # (0) = short flag, like -c
    # (1) = param description or None
    # (2) = argument name, or None if there is no argument
    # (3) = is required (bool)
    # (4) = allow multi-use
    args = list()
    for p in mc.required_props:
        if is_request_value_property(mc, p):
            continue
        args.append((
            None,
            p.get('description'),
            mangle_subcommand(p.name),
            True,
            False,
        ))
    # end for each required property
    if mc.request_value:
        args.append((
            STRUCT_FLAG,
            "Set various fields of the request structure, matching the key=value form",
            KEY_VALUE_ARG,
            True,
            True,
        ))
    # end request_value
    if mc.media_params:
        args.append((
            UPLOAD_FLAG,
            "Specify the upload protocol (%s) and the file to upload" % '|'.join(mp.protocol for mp in mc.media_params),
            MODE_ARG,
            True,
            True,
        ))
    # end upload handling
    if mc.optional_props or parameters is not UNDEFINED:
        args.append((
            PARAM_FLAG,
            "Set various optional parameters, matching the key=value form",
            VALUE_ARG,
            False,
            True,
        ))
    # end parameters
    if mc.response_schema or mc.m.get('supportsMediaDownload', False):
        args.append((
            OUTPUT_FLAG,
            "Specify the file into which to write the program's output",
            OUT_ARG,
            False,
            False,
        ))
    # handle output
%>\
("${mangle_subcommand(method)}",
${rust_optional(mc.m.get('description'))},
"Details at ${doc_base_url}/${os.path.splitext(subcommand_md_filename(resource, method))[0]}",
vec![
% for flag, desc, arg_name, required, multi in args:
(${rust_optional(arg_name)},
${rust_optional(flag)},
${rust_optional(desc)},
${rust_optional(required)},
${rust_optional(multi)}),
% if not loop.last:
% endif
% endfor
]),
% endfor # each method
]),
</%block>
% endfor # end for each resource
];
let mut app = App::new("${util.program_name()}")
<%block filter="indent_by(7)">\
.author("${', '.join(cargo.authors)}")
.version("${util.crate_version()}")
% if description is not UNDEFINED:
.about("${escape_rust_string(description)}")
% endif
.after_help("${url_info}")
% for flag, desc, arg_name, multiple in global_args:
.arg(Arg::with_name("${arg_name or flag}")
.long("${flag}")
.help("${desc}")
.multiple(${rust_boolean(multiple)})
.takes_value(${rust_boolean(arg_name)}))\
% if loop.last:
;
% else:
% endif
% endfor
## Rust runtime loop: build SubCommand/Arg values from the `arg_data` table.
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
% if have_media_params:
## the upload argument takes (mode, file) as two values and drags the
## optional mime-type flag along with it
if arg_name_str == "${MODE_ARG}" {
arg = arg.number_of_values(2);
arg = arg.value_names(&upload_value_names);
scmd = scmd.arg(Arg::with_name("${MIME_ARG}")
.short("${MIME_FLAG}")
.requires("${MODE_ARG}")
.required(false)
.help("The file's mime type, like 'application/octet-stream'")
.takes_value(true));
}
% endif
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
</%block>
</%def>

View File

@@ -0,0 +1,419 @@
<%namespace name="util" file="../../../lib/util.mako"/>\
<%!
from generator.lib.util import (hub_type, mangle_ident, indent_all_but_first_by, activity_rust_type, setter_fn_name, ADD_PARAM_FN,
upload_action_fn, is_schema_with_optionals, schema_markers, indent_by, method_default_scope,
ADD_SCOPE_FN, TREF, enclose_in)
from generator.lib.cli import (mangle_subcommand, new_method_context, PARAM_FLAG, STRUCT_FLAG, OUTPUT_FLAG, VALUE_ARG,
CONFIG_DIR, SCOPE_FLAG, is_request_value_property, FIELD_SEP, docopt_mode, FILE_ARG, MIME_ARG, OUT_ARG,
call_method_ident, POD_TYPES, opt_value, ident, JSON_TYPE_VALUE_MAP,
KEY_VALUE_ARG, to_cli_schema, SchemaEntry, CTYPE_POD, actual_json_type, CTYPE_MAP, CTYPE_ARRAY,
application_secret_path, CONFIG_DIR_FLAG, req_value, MODE_ARG,
opt_values, SCOPE_ARG, CONFIG_DIR_ARG, DEFAULT_MIME, field_vec, comma_sep_fields, JSON_TYPE_TO_ENUM_MAP,
CTYPE_TO_ENUM_MAP)
v_arg = '<%s>' % VALUE_ARG
SOPT = 'self.opt'
def borrow_prefix(p):
    """Return '&' when parameter *p* must be borrowed in the generated Rust call.

    Non-POD typed parameters and repeated (Vec) parameters are passed by
    reference; parameters without a 'type' key yield no prefix here.
    """
    ptype = p.get('type', None)
    # NOTE(review): the original condition carried a dead `ptype is None`
    # term — it could never be true under the trailing `and ptype is not None`.
    if ptype is not None and (ptype not in POD_TYPES or p.get('repeated', False)):
        return '&'
    return ''
def gen_global_parameter_names(parameters):
    """Return the sorted global parameter names, or an empty list when
    `parameters` is Mako's UNDEFINED sentinel (no globals declared)."""
    if parameters is UNDEFINED:
        return list()
    # sorted() already returns a list; no comprehension needed.
    return sorted(parameters.keys())
%>\
<%def name="new(c)">\
## Renders the CLI `Engine`: one async handler per (resource, method) pair,
## the `_doit` subcommand dispatcher, and the constructor that wires up the
## config directory, OAuth2 secret, hyper client and global parameters.
<%
# Fully qualified Rust type of the generated API hub (e.g. `api::Drive`).
hub_type_name = 'api::' + hub_type(c.schemas, util.canonical_name())
%>\
use client::{InvalidOptionsError, CLIError, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::error::Error as StdError;
use std::str::FromStr;
use serde_json as json;
use clap::ArgMatches;
use http::Uri;
use hyper::client::connect;
use tokio::io::{AsyncRead, AsyncWrite};
use tower_service;
## Distinguishes local I/O failures (output path + io::Error) from API errors.
enum DoitError {
IoError(String, io::Error),
ApiError(Error),
}
struct Engine<'n, S> {
opt: ArgMatches<'n>,
hub: ${hub_type_name}<S>,
gp: ${"Vec<&'static str>"},
gpm: Vec<(&'static str, &'static str)>,
}
impl<'n, S> Engine<'n, S>
where
S: tower_service::Service<Uri> + Clone + Send + Sync + 'static,
S::Response: hyper::client::connect::Connection + AsyncRead + AsyncWrite + Send + Unpin + 'static,
S::Future: Send + Unpin + 'static,
S::Error: Into<Box<dyn StdError + Send + Sync>>,
{
## One handler per API call; the bodies come from _method_call_impl below.
% for resource in sorted(c.rta_map.keys()):
% for method in sorted(c.rta_map[resource]):
async fn ${call_method_ident(resource, method)}(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
${self._method_call_impl(c, resource, method) | indent_all_but_first_by(2)}
}
% endfor # each method
% endfor
## Dispatches the parsed (resource, method) subcommand pair to its handler.
## When dry_run is true only argument validation is performed.
async fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
let mut err = InvalidOptionsError::new();
let mut call_result: Result<(), DoitError> = Ok(());
let mut err_opt: Option<InvalidOptionsError> = None;
## RESOURCE LOOP: check for set primary subcommand
match ${SOPT + '.subcommand()'} {
% for resource in sorted(c.rta_map.keys()):
("${mangle_subcommand(resource)}", Some(opt)) => {
match opt.subcommand() {
% for method in sorted(c.rta_map[resource]):
("${mangle_subcommand(method)}", Some(opt)) => {
call_result = self.${call_method_ident(resource, method)}(opt, dry_run, &mut err).await;
},
% endfor # each method
_ => {
err.issues.push(CLIError::MissingMethodError("${mangle_subcommand(resource)}".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
% endfor # each resource
_ => {
err.issues.push(CLIError::MissingCommandError);
writeln!(io::stderr(), "{}\n", ${SOPT}.usage()).ok();
}
}
if dry_run {
if err.issues.len() > 0 {
err_opt = Some(err);
}
Err(err_opt)
} else {
Ok(call_result)
}
}
// Please note that this call will fail if any part of the opt can't be handled
async fn new(opt: ArgMatches<'n>, connector: S) -> Result<Engine<'n, S>, InvalidOptionsError> {
let (config_dir, secret) = {
let config_dir = match client::assure_config_dir_exists(opt.value_of("${CONFIG_DIR_ARG}").unwrap_or("${CONFIG_DIR}")) {
Err(e) => return Err(InvalidOptionsError::single(e, 3)),
Ok(p) => p,
};
match client::application_secret_from_directory(&config_dir, "${application_secret_path(util.program_name())}",
"${api.credentials.replace('"', r'\"')}") {
Ok(secret) => (config_dir, secret),
Err(e) => return Err(InvalidOptionsError::single(e, 4))
}
};
let client = hyper::Client::builder().build(connector);
let auth = oauth2::InstalledFlowAuthenticator::with_client(
secret,
oauth2::InstalledFlowReturnMethod::HTTPRedirect,
client.clone(),
).persist_tokens_to_disk(format!("{}/${util.program_name()}", config_dir)).build().await.unwrap();
<% gpm = gen_global_parameter_names(parameters) %>\
let engine = Engine {
opt: opt,
hub: ${hub_type_name}::new(client, auth),
gp: ${field_vec(gpm)},
gpm: vec![
## Map mangled (CLI) global parameter names back to their API names.
% for pn in list(pn for pn in gpm if mangle_subcommand(pn) != pn):
("${mangle_subcommand(pn)}", "${pn}"),
% endfor # each global parameter
]
};
## Validate all arguments up-front via a dry run before handing out the engine.
match engine._doit(true).await {
Err(Some(err)) => Err(err),
Err(None) => Ok(engine),
Ok(_) => unreachable!(),
}
}
async fn doit(&self) -> Result<(), DoitError> {
match self._doit(false).await {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
</%def>
<%def name="_method_call_impl(c, resource, method)" buffered="True">\
<%
mc = new_method_context(resource, method, c)
supports_media_download = mc.m.get('supportsMediaDownload', False)
handle_output = mc.response_schema or supports_media_download
optional_props = [p for p in mc.optional_props if not p.get('skip_example', False)]
optional_prop_names = set(p.name for p in optional_props)
track_download_flag = (not mc.media_params and
supports_media_download and
(parameters is not UNDEFINED and 'alt' in parameters) or ('alt' in optional_prop_names))
handle_props = optional_props or parameters is not UNDEFINED
if mc.request_value:
request_cli_schema = to_cli_schema(c, mc.request_value)
request_prop_type = None
global_parameter_names = gen_global_parameter_names(parameters)
%>\
## REQUIRED PARAMETERS
% for p in mc.required_props:
<%
prop_name = mangle_ident(p.name)
prop_type = activity_rust_type(c.schemas, p, allow_optionals=False)
%>\
% if is_request_value_property(mc, p):
<% request_prop_type = prop_type %>\
${self._request_value_impl(c, request_cli_schema, prop_name, request_prop_type)}\
% elif p.type != 'string':
% if p.get('repeated', False):
## FIX(review): this branch previously emitted invalid Rust: the Vec generic
## was never closed, the binding lacked `mut` despite being pushed to, and
## the arg_from_str(...) call had unbalanced parentheses with a stray
## arg_id argument that does not match arg_from_str's 4-argument signature
## (value, err, name, type) used in the non-repeated branch below.
let mut ${prop_name}: Vec<${prop_type}> = Vec::new();
for arg in ${opt_values(mangle_subcommand(p.name))} {
${prop_name}.push(arg_from_str(&arg, err, "<${mangle_subcommand(p.name)}>", "${p.type}"));
}
% else:
let ${prop_name}: ${prop_type} = arg_from_str(&${opt_value(p.name)}, err, "<${mangle_subcommand(p.name)}>", "${p.type}");
% endif # handle repeated values
% endif # handle request value
% endfor # each required parameter
<%
call_args = list()
for p in mc.required_props:
borrow = ''
# if type is not available, we know it's the request value, which should also be borrowed
borrow = borrow_prefix(p)
arg_name = mangle_ident(p.name)
if p.get('type', '') == 'string':
if p.get('repeated', False):
arg_name = opt_values(p.name) + '.map(|&v| v.to_string()).collect::<Vec<String>>()'
else:
arg_name = opt_value(p.name)
call_args.append(borrow + arg_name)
# end for each required prop
%>\
% if track_download_flag:
let mut download_mode = false;
% endif
let mut call = self.hub.${mangle_ident(resource)}().${mangle_ident(method)}(${', '.join(call_args)});
% if handle_props:
for parg in ${opt_values(VALUE_ARG)} {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
% for p in optional_props:
<%
ptype = actual_json_type(p.name, p.type)
value_unwrap = 'value.unwrap_or("%s")' % JSON_TYPE_VALUE_MAP[ptype]
%>\
"${mangle_subcommand(p.name)}" => {
% if p.name == 'alt':
if ${value_unwrap} == "media" {
download_mode = true;
}
% endif
call = call.${mangle_ident(setter_fn_name(p))}(\
% if ptype != 'string':
arg_from_str(${value_unwrap}, err, "${mangle_subcommand(p.name)}", "${ptype}")\
% else:
${value_unwrap}\
% endif # handle conversion
);
},
% endfor # each property
_ => {
<%
value_unwrap = 'value.unwrap_or("unset")'
%>\
let mut found = false;
for param in &self.gp {
if key == *param {
% if track_download_flag and 'alt' in global_parameter_names:
if key == "alt" && ${value_unwrap} == "media" {
download_mode = true;
}
% endif
found = true;
call = call.${ADD_PARAM_FN}(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, ${value_unwrap});
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
% if comma_sep_fields(optional_prop_names):
v.extend([${comma_sep_fields(optional_prop_names)}].iter().map(|v|*v));
% endif
v } ));
}
}
}
}
% endif # handle call parameters
% if mc.media_params:
let vals = opt.values_of("${MODE_ARG}").unwrap().collect::<Vec<${'&'}str>>();
let protocol = calltype_from_str(vals[0], [${', '.join('"%s"' % mp.protocol for mp in mc.media_params)}].iter().map(|&v| v.to_string()).collect(), err);
let mut input_file = input_file_from_opts(vals[1], err);
let mime_type = input_mime_from_opts(${opt_value(MIME_ARG, default=DEFAULT_MIME)}, err);
% else:
let protocol = CallType::Standard;
% endif # support upload
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
% if method_default_scope(mc.m):
for scope in ${opt_values(SCOPE_ARG, opt=SOPT)} {
call = call.${ADD_SCOPE_FN}(scope);
}
% endif
## Make the call, handle uploads, handle downloads (also media downloads|json decoding)
% if handle_output:
let mut ostream = match writer_from_opts(opt.value_of("${(OUT_ARG)}")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(${opt_value(OUT_ARG, default='-')}.to_string(), io_err)),
};
% endif # handle output
match match protocol {
% if mc.media_params:
% for p in mc.media_params:
CallType::Upload(UploadProtocol::${p.protocol.capitalize()}) => call.${upload_action_fn(api.terms.upload_action, p.type.suffix)}(input_file.unwrap(), mime_type.unwrap()).await,
% endfor
CallType::Standard => unreachable!()
% else:
CallType::Standard => call.${api.terms.action}().await,
_ => unreachable!()
% endif
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
% if mc.response_schema:
Ok((mut response, output_schema)) => {
% else:
Ok(mut response) => {
% endif # handle output structure
## We are not generating optimal code, but hope it will still be logically correct.
## If not, we might build the code in python
## TODO: Fix this
% if track_download_flag:
if !download_mode {
% endif
% if mc.response_schema:
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
% endif
% if track_download_flag:
} else {
% endif
% if supports_media_download:
## Download is the only option - nothing else matters
let bytes = hyper::body::to_bytes(response.into_body()).await.expect("a string as API currently is inefficient").to_vec();
ostream.write_all(&bytes).expect("write to be complete");
ostream.flush().expect("io to never fail which should really be fixed one day");
% endif
% if track_download_flag:
}
% endif
Ok(())
}
}
}\
</%def>
<%def name="_request_value_impl(c, request_cli_schema, request_prop_name, request_prop_type)">
## Renders the code that assembles the request structure from repeated
## `-r key=value` arguments by walking a FieldCursor over the flattened
## CLI schema and building a serde_json object, which is then deserialized
## into the strongly-typed api request value.
<%
allow_optionals_fn = lambda s: is_schema_with_optionals(schema_markers(s, c, transitive=False))
# Recursively flattens `schema` into (schema, SchemaEntry, path) triples
# appended to `res`, collecting every reachable field name in `fields`.
# `cur` tracks the current field path. NOTE(review): the mutable default
# `cur=list()` appears safe only because the guard below rebinds it and each
# append is popped after recursion, leaving the shared default empty —
# confirm before refactoring.
def flatten_schema_fields(schema, res, fields, cur=list()):
if len(cur) == 0:
cur = list()
# Optional sub-structs are generated as Option<T>; member access then
# needs `.as_mut().unwrap()` in the emitted Rust.
opt_access = '.as_mut().unwrap()'
allow_optionals = allow_optionals_fn(schema)
if not allow_optionals:
opt_access = ''
for fn, f in schema.fields.items():
cur.append(['%s%s' % (mangle_ident(fn), opt_access), fn])
fields.add(fn)
if isinstance(f, SchemaEntry):
cur[-1][0] = mangle_ident(fn)
res.append((schema, f, list(cur)))
else:
flatten_schema_fields(f, res, fields, cur)
cur.pop()
# endfor
# end utility
schema_fields = list()
fields = set()
flatten_schema_fields(request_cli_schema, schema_fields, fields)
%>\
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in ${opt_values(KEY_VALUE_ARG)} {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
## A bare key without a value only moves the cursor; any error recorded
## while probing the move is discarded.
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
## This type-annotation is not required in nightly (or newer rustc)
## TODO(ST): try to remove it once there is a newer stable
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
% for schema, fe, f in schema_fields:
<%
# Mangled (CLI-facing) vs. original (JSON-facing) dotted field paths,
# plus the JSON/container type enums used by set_json_value below.
pname = FIELD_SEP.join(mangle_subcommand(t[1]) for t in f)
sname = FIELD_SEP.join(t[1] for t in f)
ptype = actual_json_type(f[-1][1], fe.actual_property.type)
jtype = 'JsonType::' + JSON_TYPE_TO_ENUM_MAP[ptype]
ctype = 'ComplexType::' + CTYPE_TO_ENUM_MAP[fe.container_type]
%>\
"${pname}" => Some(("${sname}", JsonTypeInfo { jtype: ${jtype}, ctype: ${ctype} })),
% endfor # each nested field
_ => {
let suggestion = FieldCursor::did_you_mean(key, &${field_vec(sorted(fields))});
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut ${request_prop_name}: api::${request_prop_type} = json::value::from_value(object).unwrap();
</%def>

View File

@@ -0,0 +1,70 @@
<%namespace name="argparse" file="lib/argparse.mako"/>\
<%namespace name="engine" file="lib/engine.mako"/>\
<%namespace name="util" file="../../lib/util.mako"/>\
<%
from generator.lib.util import (new_context, rust_comment, to_extern_crate_name, library_to_crate_name, library_name,
indent_all_but_first_by)
from generator.lib.cli import OUT_ARG, DEBUG_FLAG, opt_value
c = new_context(schemas, resources, context.get('methods'))
default_user_agent = "google-cli-rust-client/" + cargo.build_version
%>\
<%block filter="rust_comment">\
<%util:gen_info source="${self.uri}" />\
</%block>
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
extern crate tokio;
#[macro_use]
extern crate clap;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
use ${to_extern_crate_name(library_to_crate_name(library_name(name, version), make.depends_on_suffix))}::{api, Error, oauth2};
mod client;
${engine.new(c)}\
#[tokio::main]
async fn main() {
let mut exit_status = 0i32;
${argparse.new(c) | indent_all_but_first_by(1)}\
let matches = app.get_matches();
## FIX(review): the flag name was prefixed with a stray 'a' ("adebug"),
## which can never match the registered debug argument, making the
## `if debug { ... }` error-detail branch in main() unreachable.
let debug = matches.is_present("${DEBUG_FLAG}");
let connector = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots()
.https_or_http()
.enable_http1()
.enable_http2()
.build();
match Engine::new(matches, connector).await {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit().await {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}

View File

@@ -0,0 +1,28 @@
<%
from generator.lib.util import (put_and, new_context)
from generator.lib.cli import (subcommand_md_filename, mangle_subcommand, pretty)
c = new_context(schemas, resources, context.get('methods'))
%>\
<%namespace name="util" file="../../lib/util.mako"/>\
site_name: ${util.canonical_name()} v${util.crate_version()}
site_url: ${cargo.doc_base_url}/${util.crate_name()}
site_description: A complete library to interact with ${util.canonical_name()} (protocol ${version})
repo_url: ${util.github_source_root_url()}
docs_dir: ${mkdocs.docs_dir}
site_dir: ${mkdocs.site_dir}
pages:
- ['index.md', 'Home']
% for resource in sorted(c.rta_map.keys()):
% for method in sorted(c.rta_map[resource]):
- ['${subcommand_md_filename(resource, method)}', '${pretty(resource)}', '${pretty(method)}']
% endfor # each method
% endfor # each resource
theme: readthedocs
copyright: Copyright &copy; ${copyright.years}, ${put_and(["`%s`" % a for a in copyright.authors])}

View File

@@ -0,0 +1,214 @@
# DO NOT EDIT !
# This file was generated automatically by '${self.uri}'
# DO NOT EDIT !
<%
import os
import json
apis = {}
api_info = []
doc_root = directories.output + '/' + directories.doc_subdir
doc_index = doc_root + '/index.html'
def to_doc_root(gen_root, crate_name):
if make.documentation_engine == 'mkdocs':
return gen_root + '/' + mkdocs.site_dir
else:
return gen_root + '/target/doc/' + util.to_extern_crate_name(crate_name)
# end utility
central_api_index = lambda crate_name: doc_root + '/' + util.to_extern_crate_name(crate_name) + '/index.html'
if os.environ.get('FETCH_APIS') is not None:
import urllib3
http = urllib3.PoolManager()
# Seems like connecting to https stopped working, so download the json below manually and put it into
# apis.json in the repository root.
discovery_url = 'https://www.googleapis.com/discovery/v1/apis'
apis = json.loads(open("apis.json", "r").read())
print('Loaded {} apis from Google'.format(len(apis['items'])))
for manualy_api in api.get('manually_added', list()):
apis['items'].append({
'name': manualy_api['name'],
'version': manualy_api['version'],
'discoveryRestUrl': manualy_api['discovery_rest_url']
})
print('Total {} apis'.format(len(apis['items'])))
json_api_targets = []
suffix = make.target_suffix
agsuffix = make.aggregated_target_suffix
global_targets = make.get('global_targets', False)
post_processor_arg = ''
if mako is not UNDEFINED:
post_processor_arg = '--post-process-python-module=%s' % mako.post_processor_module
%>\
% for an, versions in api.list.items():
% if an in api.get('blacklist', list()):
<% continue %>\
% endif
% for version in versions:
% if an + '-' + version in api.get('blacklist', list()):
<% continue %>\
% endif
<%
import generator.lib.util as util
import os
import json
def gen_type_cfg_path(id):
return '$(API_DIR)/type-' + id + '.yaml'
CMN_SRC = '/src/client.rs'
api_name = util.library_name(an, version)
api_target = util.target_directory_name(an, version, suffix)
depends_on_target = ''
if make.depends_on_suffix is not None:
depends_on_target = directories.output + '/' + util.target_directory_name(an, version, make.depends_on_suffix) + CMN_SRC
crate_name = util.library_to_crate_name(api_name, suffix)
gen_root = directories.output + '/' + api_target
gen_root_stamp = gen_root + '/.timestamp'
api_common = gen_root + CMN_SRC
api_clean = api_target + '-clean'
api_cargo = api_target + '-cargo'
api_doc = api_target + '-doc'
api_doc_root = to_doc_root(gen_root, crate_name)
api_doc_index = api_doc_root + '/index.html'
# source, destination of individual output files
sds = [(directories.mako_src + '/' + make.id + '/' + i.source + '.mako', gen_root + '/' +
i.get('output_dir', '') + '/' + i.source.strip('../')) for i in make.templates]
api_json = util.api_json_path(directories.api_base, an, version)
api_meta_dir = os.path.dirname(api_json)
print('Loading JSON: {}'.format(api_json))
try:
with open(api_json, 'r') as fh:
crate_version = util.crate_version(cargo.build_version + make.aggregated_target_suffix, json.load(fh).get('revision', '00000000'))
api_crate_publish_file = api_meta_dir + '/crates/' + crate_version
api_json_overrides = api_meta_dir + '/' + an + '-api_overrides.yaml'
type_specific_cfg = gen_type_cfg_path(make.id)
api_json_inputs = api_json + ' $(API_SHARED_INFO) ' + type_specific_cfg
if os.path.isfile(api_json_overrides):
api_json_inputs += ' ' + api_json_overrides
api_info.append((api_target, api_clean, api_cargo, api_doc, api_crate_publish_file, gen_root))
space_join = lambda i: ' '.join(a[i] for a in api_info)
except Exception as e:
print('Could not open JSON file at {}'.format(api_json))
print(e)
%>\
${api_common}: $(RUST_SRC)/${make.id}/client.rs $(lastword $(MAKEFILE_LIST)) ${gen_root_stamp}
@ echo "// COPY OF '$<'" > $@
@ echo "// DO NOT EDIT" >> $@
@cat $< >> $@
${gen_root_stamp}: $(MAKO_RENDER) ${' '.join(i[0] for i in sds)} ${api_json_inputs} $(MAKO_STANDARD_DEPENDENCIES) ${depends_on_target}
@echo Generating ${api_target}
$(MAKO) -io ${' '.join("%s=%s" % (s, d) for s, d in sds)} ${post_processor_arg} --data-files ${api_json_inputs}
@touch $@
${api_target}: ${api_common}
${api_crate_publish_file}: ${api_target}
cd ${gen_root} && cargo smart-release --execute --no-changelog -b keep
@mkdir -p ${os.path.dirname(api_crate_publish_file)}
touch $@
${api_cargo}: ${api_target}
cd ${gen_root} && cargo $(ARGS)
${api_doc_index}: ${api_common}
% if make.documentation_engine == 'rustdoc':
cd ${gen_root} && cargo doc
@echo "Docs for ${api_target} at $@"
% else:
@echo mkdocs ${api_doc_index}
## Our README is the landing page, and thus will serve multiple roles at once !
@cd ${gen_root} && (mkdir -p ${mkdocs.docs_dir} && cd ${mkdocs.docs_dir} && ln -s ../README.md index.md &>/dev/null) || : && $(MKDOCS) build --clean
% endif
${api_doc}: ${api_doc_index}
${central_api_index(crate_name)}: ${api_doc_index}
@test ! -d ${doc_root} && mkdir -p target/doc && ln -s `pwd`/target/doc ${doc_root} || :
% if make.documentation_engine == 'mkdocs':
cp -Rf ${api_doc_root} $(dir $@)
% endif
${api_clean}:
-rm -Rf ${gen_root}
% endfor
% endfor
clean-all${agsuffix}: ${space_join(1)}
cargo${agsuffix}: ${space_join(2)}
publish${agsuffix}: | gen-all${agsuffix} ${space_join(4)}
gen-all${agsuffix}: ${space_join(0)}
% if global_targets:
${doc_index}: docs-cli ${gen_type_cfg_path('cli')}
$(MAKO) --var DOC_ROOT=${doc_root} -io $(MAKO_SRC)/index.html.mako=$@ --data-files $(API_SHARED_INFO) $(API_LIST)
@echo Documentation index created at '$@'
docs-all: ${doc_index}
docs-all-clean:
rm -Rf ${doc_root}
github-pages: | docs-all-clean docs-all
$(GHP_IMPORT) -n ${doc_root}
## Have to force-push - allows us to start docs fresh, clearing out unused history
git push origin +gh-pages
.PHONY += github-pages docs-all docs-all-clean
% endif
docs${agsuffix}: ${' '.join(central_api_index(util.library_to_crate_name(a[0])) for a in api_info)} $(MAKO_STANDARD_DEPENDENCIES)
.PHONY = $(.PHONY) help${agsuffix} clean${agsuffix} cargo${agsuffix} publish${agsuffix} gen-all${agsuffix} ${space_join(0)} ${space_join(1)} ${space_join(2)} ${space_join(3)}
help${agsuffix}:
$(info gen-all${agsuffix} - make all ${make.target_name})
$(info docs${agsuffix} - make all ${make.target_name} documentation)
$(info clean-all${agsuffix} - delete all generated ${make.target_name})
$(info cargo${agsuffix} - run cargo on all ${make.target_name}, use ARGS="args ..." to specify cargo arguments)
$(info publish${agsuffix} - run cargo publish on all ${make.target_name} and remember successful ones with marker files)
% for a in api_info:
$(info ${a[0]} - build the ${a[0]} api)
$(info ${a[1]} - clean all generated files of the ${a[0]} api)
$(info ${a[2]} - run cargo on the ${a[0]} api, using given ARGS="arg1 ...")
$(info ${a[3]} - run cargo doc on the ${a[0]}")
% endfor
% if global_targets:
.PHONY += update-json
% for info in (apis.get('items') or []):
<%
import generator.lib.util as util
import os
name = util.normalize_library_name(info['name'])
target = util.api_json_path(directories.api_base, name, info['version'])
target_dir = os.path.dirname(target)
## assure the target never actually exists to force it to wget whenver we ask !
fake_target = target + '-force'
## Some service urls have $ in them. This may cause the console to treat them as env vars.
## To handle this properly, we need to escape the $.
url = info['discoveryRestUrl'].replace("$", "$$")
json_api_targets.append(fake_target)
%>\
${fake_target}: $(PYTHON_BIN)
@mkdir -p ${target_dir}
-curl --silent --show-error --fail --retry 3 -o '${target}' '${url}'
$(PYTHON) $(SORT_JSON_FILE) --skip-missing-file '${target}' || rm ${target}
% endfor
update-json: ${' '.join(json_api_targets)}
$(PYTHON) $(API_VERSION_GEN) $(API_DIR) $(API_LIST) $(API_LIST)
% endif

View File

@@ -0,0 +1,144 @@
<%
import json
import os
import yaml
from generator.lib.util import (api_json_path, library_name, library_to_crate_name,
gen_crate_dir, api_index, crates_io_url, program_name,
crate_version)
title = 'Google Service Documentation for Rust'
# A poor mans merge, just for what we need right now
def merge_required_fields(map):
map.cargo.build_version = cargo.build_version
return map
# type cache: {'api': type-api.yaml-contents }
tc = dict()
for api_type in make.types:
data = yaml.load_all(open(os.path.join(directories.api_base, 'type-%s.yaml' % api_type)))
tc[api_type] = merge_required_fields(type(directories)(next(data)))
# end for each type to load cache for
%>\
<!DOCTYPE html>
<!--
DO NOT EDIT !
This file was generated automatically by '${self.uri}'
DO NOT EDIT !
-->
<html lang="en">
<head>
<link rel="stylesheet"
href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css"
integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u"
crossorigin="anonymous">
<script type="text/javascript">
alertShown = false
// Select the install command shown on `button` and copy it to the clipboard
// (routed through the oncopy handler below); shows a one-time explanation.
function onClick(button) {
selectElementContents(button)
if (document.execCommand('copy') && !alertShown) {
msg = "Installation script copied to clipboard.\n"
msg += "\nIt contains no new-lines and will \n"
msg += "not execute automatically after\n"
msg += "pasting it into a shell so you can\n"
msg += "review it beforehand.\n"
msg += "\nThis message will not be shown again."
alert(msg)
alertShown = true
}
}
// Select all text content of `el`, with a fallback for legacy IE text ranges.
function selectElementContents(el) {
if (window.getSelection && document.createRange) {
var sel = window.getSelection()
var range = document.createRange()
range.selectNodeContents(el)
sel.removeAllRanges()
sel.addRange(range)
} else if (document.selection && document.body.createTextRange) {
var textRange = document.body.createTextRange()
textRange.moveToElementText(el)
textRange.select()
}
}
// Rewrite the copied text so it first installs rustup if missing, then runs
// the `cargo install ...` command shown on the clicked button.
function onCopy(e) {
installation_script = '{ command -v rustup 2>&1 >/dev/null || curl https://sh.rustup.rs -sSf | sh } && ' + e.target.textContent
e.clipboardData.setData('text/plain', installation_script);
e.preventDefault()
}
</script>
<title>${title}</title>
</head>
<body>
<div class="container">
<h1>${title}</h1>
<table class="table table-hover">
<thead>
<tr>
<th>API Name</th>
<th>API Docs</th>
<th>CLI Docs</th>
<th>Install</th>
</tr>
</thead>
<tbody>
% for name in sorted(api.list.keys()):
% if name in api.blacklist:
<% continue %>\
% endif
% for version in api.list[name]:
<tr>
<%
type_names = ["api", "cli"]
assert set(type_names) == set(tc.keys()), "The type cache has changed, make sure to update the documentation accordingly"
with open(api_json_path(directories.api_base, name, version)) as fp:
metadata = json.load(fp)
if metadata is None:
continue
api_data = tc["api"]
revision = metadata.get('revision', None)
api_link = api_index(DOC_ROOT, name, version, api_data['make'],
api_data['cargo'], revision)
crates_link = crates_io_url(name, version)
crates_link += "/"
crates_link += crate_version(api_data.cargo.build_version, revision)
cli_data = tc["cli"]
cli_link = api_index(DOC_ROOT, name, version, cli_data['make'],
cli_data['cargo'], revision)
%>\
<td>${name} (${version})</td>
<td>
<a href="${api_link}" title="API docs for the ${name} ${version}">API</a>
<a href="${crates_link}">
<img src="${url_info.asset_urls.crates_img}"
title="This API on crates.io" height="16" width="16"/>
</a>
</td>
<td>
<a href="${cli_link}" title="CLI docs for the ${name} ${version}">
CLI
</a>
</td>
<td>
<button class="mono" onclick="onClick(this)"
oncopy="onCopy(event)"
title="Copy complete installation script to clipboard">
cargo install ${library_to_crate_name(library_name(name, version))}-cli
</button>
</td>
</tr>
% endfor # each version
% endfor # each API
</tbody>
</table>
</div>
</body>
</html>

View File

@@ -0,0 +1,2 @@
# Required for async keyword to be recognised
edition = "2018"